From 540990a9c198d9c29460b52272de8b8926e8dc88 Mon Sep 17 00:00:00 2001
From: klpoland
Date: Fri, 20 Feb 2026 14:16:47 -0500
Subject: [PATCH 1/9] adding time based filtering and slider ui
---
.../api_methods/helpers/temporal_filtering.py | 168 ++++++++++++++++++
.../serializers/capture_serializers.py | 56 +++++-
gateway/sds_gateway/api_methods/tasks.py | 62 ++++++-
.../js/actions/DownloadActionManager.js | 13 +-
gateway/sds_gateway/static/js/file-list.js | 6 +-
gateway/sds_gateway/templates/base.html | 3 +
.../templates/users/file_list.html | 2 +
.../users/partials/captures_page_table.html | 4 +-
.../users/partials/web_download_modal.html | 164 +++++++++++++++++
gateway/sds_gateway/users/views_deprecated.py | 11 ++
10 files changed, 476 insertions(+), 13 deletions(-)
create mode 100644 gateway/sds_gateway/api_methods/helpers/temporal_filtering.py
diff --git a/gateway/sds_gateway/api_methods/helpers/temporal_filtering.py b/gateway/sds_gateway/api_methods/helpers/temporal_filtering.py
new file mode 100644
index 000000000..16b648d91
--- /dev/null
+++ b/gateway/sds_gateway/api_methods/helpers/temporal_filtering.py
@@ -0,0 +1,168 @@
+import re
+
+from django.db.models import QuerySet
+
+from sds_gateway.api_methods.models import CaptureType, Capture, File
+from sds_gateway.api_methods.utils.opensearch_client import get_opensearch_client
+from sds_gateway.api_methods.utils.relationship_utils import get_capture_files
+from loguru import logger as log
+
+# Digital RF spec: rf@SECONDS.MILLISECONDS.h5 (e.g. rf@1396379502.000.h5)
+# https://github.com/MITHaystack/digital_rf
+DRF_RF_FILENAME_PATTERN = re.compile(
+ r"^rf@(\d+)\.(\d+)\.h5$",
+ re.IGNORECASE,
+)
+DRF_RF_FILENAME_REGEX_STR = r"^rf@\d+\.\d+\.h5$"
+
+
+def drf_rf_filename_from_ms(ms: int) -> str:
+ """Format ms as DRF rf data filename (canonical for range queries)."""
+ return f"rf@{ms // 1000}.{ms % 1000:03d}.h5"
+
+
+def drf_rf_filename_to_ms(file_name: str) -> int | None:
+ """
+ Parse DRF rf data filename to milliseconds.
+ Handles rf@SECONDS.MILLISECONDS.h5; fractional part padded to 3 digits.
+ """
+ name = file_name.strip()
+ match = DRF_RF_FILENAME_PATTERN.match(name)
+ if not match:
+ return None
+ try:
+ seconds = int(match.group(1))
+ frac = match.group(2).ljust(3, "0")[:3]
+ return seconds * 1000 + int(frac)
+ except (ValueError, TypeError):
+ return None
+
+
+def _catch_capture_type_error(capture_type: CaptureType) -> None:
+ if capture_type != CaptureType.DigitalRF:
+ msg = "Only DigitalRF captures are supported for temporal filtering."
+ log.error(msg)
+ raise ValueError(msg)
+
+
+def _parse_drf_rf_timestamp(file_name: str) -> int | None:
+ """Extract timestamp in ms from a Digital RF data filename (alias for drf_rf_filename_to_ms)."""
+ return drf_rf_filename_to_ms(file_name)
+
+
+def get_capture_bounds(capture_type: CaptureType, capture_uuid: str) -> tuple[int, int]:
+ """Get start and end bounds for capture from opensearch."""
+
+ _catch_capture_type_error(capture_type)
+
+ client = get_opensearch_client()
+ index = f"captures-{capture_type}"
+
+ try:
+ response = client.get(index=index, id=capture_uuid)
+ except Exception as e:
+ if getattr(e, "status_code", None) == 404 or (hasattr(e, "info") and e.info.get("status") == 404):
+ raise ValueError(
+ f"Capture {capture_uuid} not found in OpenSearch index {index}"
+ ) from e
+ raise
+
+ if not response.get("found"):
+ raise ValueError(
+ f"Capture {capture_uuid} not found in OpenSearch index {index}"
+ )
+
+ source = response["_source"]
+ search_props = source["search_props"]
+ start_time = search_props["start_time"]
+ end_time = search_props["end_time"]
+ log.debug(f"start_time: {start_time}, end_time: {end_time}")
+ return start_time, end_time
+
+
+def get_data_files(capture_type: CaptureType, capture: Capture) -> QuerySet[File]:
+ """Get the data files in the capture."""
+ _catch_capture_type_error(capture_type)
+
+ return get_capture_files(capture).filter(name__regex=DRF_RF_FILENAME_REGEX_STR)
+
+
+def get_file_cadence(capture_type: CaptureType, capture: Capture) -> int:
+ """Get the file cadence in milliseconds. OpenSearch bounds are in seconds."""
+ _catch_capture_type_error(capture_type)
+
+ capture_uuid = str(capture.uuid)
+ try:
+ start_time, end_time = get_capture_bounds(capture_type, capture_uuid)
+ except ValueError as e:
+ log.error(e)
+ raise
+
+ data_files = get_data_files(capture_type, capture)
+ count = data_files.count()
+ if count == 0:
+ return 0
+ duration_sec = end_time - start_time
+ log.debug(f"duration_sec: {duration_sec}")
+ duration_ms = duration_sec * 1000
+ log.debug(f"duration_ms: {duration_ms}")
+ return max(1, int(duration_ms / count))
+
+
+def get_duration_bounds(capture_type: CaptureType, capture_uuid: str, relative_time: int) -> tuple[int, int]:
+ """Return (0, length_of_capture_ms). OpenSearch bounds are in seconds."""
+ try:
+ start_time, end_time = get_capture_bounds(capture_type, capture_uuid)
+ except ValueError as e:
+ log.error(e)
+ raise
+
+ length_of_capture_ms = (end_time - start_time) * 1000
+ return 0, length_of_capture_ms
+
+
+def filter_capture_data_files_selection_bounds(
+ capture_type: CaptureType,
+ capture: Capture,
+ start_time: int, # relative ms from start of capture (from UI)
+ end_time: int, # relative ms from start of capture (from UI)
+) -> QuerySet[File]:
+ """Filter the capture file selection bounds to the given start and end times."""
+ _catch_capture_type_error(capture_type)
+ epoch_start_sec, _ = get_capture_bounds(capture_type, str(capture.uuid))
+ epoch_start_ms = epoch_start_sec * 1000
+ start_ms = epoch_start_ms + start_time
+ end_ms = epoch_start_ms + end_time
+
+ start_file_name = drf_rf_filename_from_ms(start_ms)
+ end_file_name = drf_rf_filename_from_ms(end_ms)
+
+ data_files = get_data_files(capture_type, capture)
+ return data_files.filter(
+ name__gte=start_file_name,
+ name__lte=end_file_name,
+ ).order_by("name")
+
+def get_capture_files_with_temporal_filter(
+ capture_type: CaptureType,
+ capture: Capture,
+ start_time: int | None = None, # milliseconds since epoch (start of capture)
+ end_time: int | None = None, # milliseconds since epoch
+) -> QuerySet[File]:
+ """Get the capture files with temporal filtering."""
+ _catch_capture_type_error(capture_type)
+
+ if start_time is None or end_time is None:
+ log.warning("Start or end time is None, returning all capture files without temporal filtering")
+ return get_capture_files(capture)
+
+ # get non-data files
+ non_data_files = get_capture_files(capture).exclude(name__regex=DRF_RF_FILENAME_REGEX_STR)
+
+ # get data files with temporal filtering
+ data_files = filter_capture_data_files_selection_bounds(
+ capture_type, capture, start_time, end_time
+ )
+
+ # return all files
+ return non_data_files.union(data_files)
\ No newline at end of file
diff --git a/gateway/sds_gateway/api_methods/serializers/capture_serializers.py b/gateway/sds_gateway/api_methods/serializers/capture_serializers.py
index 037ebafd3..692628483 100644
--- a/gateway/sds_gateway/api_methods/serializers/capture_serializers.py
+++ b/gateway/sds_gateway/api_methods/serializers/capture_serializers.py
@@ -9,6 +9,8 @@
from rest_framework.utils.serializer_helpers import ReturnList
from sds_gateway.api_methods.helpers.index_handling import retrieve_indexed_metadata
+from sds_gateway.api_methods.helpers.temporal_filtering import get_capture_bounds
+from sds_gateway.api_methods.helpers.temporal_filtering import get_file_cadence
from sds_gateway.api_methods.models import Capture
from sds_gateway.api_methods.models import CaptureType
from sds_gateway.api_methods.models import DEPRECATEDPostProcessedData
@@ -70,6 +72,8 @@ class CaptureGetSerializer(serializers.ModelSerializer[Capture]):
files = serializers.SerializerMethodField()
center_frequency_ghz = serializers.SerializerMethodField()
sample_rate_mhz = serializers.SerializerMethodField()
+ length_of_capture_ms = serializers.SerializerMethodField()
+ file_cadence_ms = serializers.SerializerMethodField()
files_count = serializers.SerializerMethodField()
total_file_size = serializers.SerializerMethodField()
formatted_created_at = serializers.SerializerMethodField()
@@ -94,12 +98,29 @@ def get_files(self, capture: Capture) -> ReturnList[File]:
def get_center_frequency_ghz(self, capture: Capture) -> float | None:
"""Get the center frequency in GHz from the capture model property."""
return capture.center_frequency_ghz
-
- @extend_schema_field(serializers.FloatField)
+
+ @extend_schema_field(serializers.FloatField(allow_null=True))
def get_sample_rate_mhz(self, capture: Capture) -> float | None:
- """Get the sample rate in MHz from the capture model property."""
+ """Get the sample rate in MHz from the capture model property. None if not indexed in OpenSearch."""
return capture.sample_rate_mhz
+ @extend_schema_field(serializers.IntegerField(allow_null=True))
+ def get_length_of_capture_ms(self, capture: Capture) -> int | None:
+ """Get the length of the capture in milliseconds. OpenSearch bounds are in seconds."""
+ try:
+ start_time, end_time = get_capture_bounds(capture.capture_type, str(capture.uuid))
+ return (end_time - start_time) * 1000
+ except (ValueError, IndexError, KeyError):
+ return None
+
+ @extend_schema_field(serializers.IntegerField(allow_null=True))
+ def get_file_cadence_ms(self, capture: Capture) -> int | None:
+ """Get the file cadence in milliseconds. None if not indexed in OpenSearch."""
+ try:
+ return get_file_cadence(capture.capture_type, capture)
+ except (ValueError, IndexError, KeyError):
+ return None
+
@extend_schema_field(serializers.IntegerField)
def get_files_count(self, capture: Capture) -> int:
"""Get the count of files associated with this capture."""
@@ -304,6 +325,8 @@ class CompositeCaptureSerializer(serializers.Serializer):
files_count = serializers.SerializerMethodField()
total_file_size = serializers.SerializerMethodField()
formatted_created_at = serializers.SerializerMethodField()
+ length_of_capture_ms = serializers.SerializerMethodField()
+ file_cadence_ms = serializers.SerializerMethodField()
def get_files(self, obj: dict[str, Any]) -> ReturnList[File]:
"""Get all files from all channels in the composite capture."""
@@ -350,6 +373,33 @@ def get_formatted_created_at(self, obj: dict[str, Any]) -> str:
return created_at.strftime("%m/%d/%Y %I:%M:%S %p")
return ""
+ @extend_schema_field(serializers.IntegerField(allow_null=True))
+ def get_length_of_capture_ms(self, obj: dict[str, Any]) -> int | None:
+ """Use first channel's bounds for composite capture duration."""
+ channels = obj.get("channels") or []
+ if not channels:
+ return None
+ try:
+ capture = Capture.objects.get(uuid=channels[0]["uuid"])
+ start_time, end_time = get_capture_bounds(
+ capture.capture_type, str(capture.uuid)
+ )
+ return (end_time - start_time) * 1000
+ except (ValueError, IndexError, KeyError):
+ return None
+
+ @extend_schema_field(serializers.IntegerField(allow_null=True))
+ def get_file_cadence_ms(self, obj: dict[str, Any]) -> int | None:
+ """Use first channel's file cadence for composite capture."""
+ channels = obj.get("channels") or []
+ if not channels:
+ return None
+ try:
+ capture = Capture.objects.get(uuid=channels[0]["uuid"])
+ return get_file_cadence(capture.capture_type, capture)
+ except (ValueError, IndexError, KeyError):
+ return None
+
def build_composite_capture_data(captures: list[Capture]) -> dict[str, Any]:
"""Build composite capture data from a list of captures with the same top_level_dir.
diff --git a/gateway/sds_gateway/api_methods/tasks.py b/gateway/sds_gateway/api_methods/tasks.py
index e4aed2651..c7dff0b31 100644
--- a/gateway/sds_gateway/api_methods/tasks.py
+++ b/gateway/sds_gateway/api_methods/tasks.py
@@ -26,6 +26,7 @@
from sds_gateway.api_methods.models import TemporaryZipFile
from sds_gateway.api_methods.models import ZipFileStatus
from sds_gateway.api_methods.models import user_has_access_to_item
+from sds_gateway.api_methods.models import CaptureType
from sds_gateway.api_methods.utils.disk_utils import DISK_SPACE_BUFFER
from sds_gateway.api_methods.utils.disk_utils import check_disk_space_available
from sds_gateway.api_methods.utils.disk_utils import estimate_disk_size
@@ -676,15 +677,26 @@ def _process_item_files(
item_type: ItemType,
item_uuid: UUID,
temp_zip: TemporaryZipFile,
+ start_time: int | None = None,
+ end_time: int | None = None,
) -> tuple[Mapping[str, UUID | int | str] | None, str | None, int | None, int | None]: # pyright: ignore[reportMissingTypeArgument]
"""
Process files for an item and create a zip file.
+ Args:
+ user: The user requesting the files
+ item: The item object (Dataset or Capture)
+ item_type: Type of item (dataset or capture)
+ item_uuid: UUID of the item to download
+ temp_zip: The temporary zip file to create
+ start_time: Optional start time for temporal filtering
+ end_time: Optional end time for temporal filtering
+
Returns:
tuple: (error_response, zip_file_path, total_size, files_processed)
If error_response is not None, the other values are None
"""
- files = _get_item_files(user, item, item_type)
+ files = _get_item_files(user, item, item_type, start_time, end_time)
if not files:
log.warning(f"No files found for {item_type} {item_uuid}")
error_message = f"No files found in {item_type}"
@@ -979,7 +991,11 @@ def _handle_timeout_exception(
time_limit=30 * 60, soft_time_limit=25 * 60
) # 30 min hard limit, 25 min soft limit
def send_item_files_email( # noqa: C901, PLR0911, PLR0912, PLR0915
- item_uuid: UUID, user_id: str, item_type: str | ItemType
+ item_uuid: UUID,
+ user_id: str,
+ item_type: str | ItemType,
+ start_time: int | None = None,
+ end_time: int | None = None,
) -> Mapping[str, UUID | str | int]:
"""
Unified Celery task to create a zip file of item files and send it via email.
@@ -990,6 +1006,8 @@ def send_item_files_email( # noqa: C901, PLR0911, PLR0912, PLR0915
item_uuid: UUID of the item to process
user_id: ID of the user requesting the download
item_type: Type of item (dataset or capture)
+ start_time: Optional start time for temporal filtering
+ end_time: Optional end time for temporal filtering
Returns:
dict: Task result with status and details
"""
@@ -1053,6 +1071,8 @@ def send_item_files_email( # noqa: C901, PLR0911, PLR0912, PLR0915
item_type=item_type_enum,
item_uuid=item_uuid,
temp_zip=temp_zip,
+ start_time=start_time,
+ end_time=end_time,
)
)
if error_response:
@@ -1251,7 +1271,13 @@ def _validate_item_download_request(
return None, user, item
-def _get_item_files(user: User, item: Any, item_type: ItemType) -> list[File]:
+def _get_item_files(
+ user: User,
+ item: Any,
+ item_type: ItemType,
+ start_time: int | None = None,
+ end_time: int | None = None,
+) -> list[File]:
"""
Get all files for an item based on its type.
@@ -1259,14 +1285,16 @@ def _get_item_files(user: User, item: Any, item_type: ItemType) -> list[File]:
user: The user requesting the files
item: The item object (Dataset or Capture)
item_type: Type of item (dataset or capture)
-
+ start_time: Optional start time for temporal filtering
+ end_time: Optional end time for temporal filtering
Returns:
List of files associated with the item
"""
- from sds_gateway.api_methods.utils.relationship_utils import get_capture_files
+ from sds_gateway.api_methods.helpers.temporal_filtering import get_capture_files_with_temporal_filter
from sds_gateway.api_methods.utils.relationship_utils import (
get_dataset_files_including_captures,
)
+ from sds_gateway.api_methods.utils.relationship_utils import get_capture_files
if item_type == ItemType.DATASET:
files_queryset = get_dataset_files_including_captures(
@@ -1277,8 +1305,28 @@ def _get_item_files(user: User, item: Any, item_type: ItemType) -> list[File]:
return files
if item_type == ItemType.CAPTURE:
- files = get_capture_files(item, include_deleted=False)
- log.info(f"Found {len(files)} files for capture {item.uuid}")
+ capture_type = item.capture_type
+ # temporal filtering is only supported for DigitalRF captures
+ if capture_type is CaptureType.DigitalRF:
+ files = get_capture_files_with_temporal_filter(
+ capture_type=capture_type,
+ capture=item,
+ start_time=start_time,
+ end_time=end_time,
+ )
+ else:
+ if start_time is not None or end_time is not None:
+ logger.warning(
+ "Temporal filtering is only supported for DigitalRF captures, "
+ "ignoring start_time and end_time"
+ )
+
+ files = get_capture_files(
+ capture=item,
+ include_deleted=False,
+ )
+
+ logger.info(f"Found {len(files)} files for capture {item.uuid}")
return list(files)
log.warning(f"Unknown item type: {item_type}")
diff --git a/gateway/sds_gateway/static/js/actions/DownloadActionManager.js b/gateway/sds_gateway/static/js/actions/DownloadActionManager.js
index ca33ca45d..233cb9b43 100644
--- a/gateway/sds_gateway/static/js/actions/DownloadActionManager.js
+++ b/gateway/sds_gateway/static/js/actions/DownloadActionManager.js
@@ -274,7 +274,18 @@ class DownloadActionManager {
}
};
- window.DOMUtils.openModal(modalId);
+ // Initialize temporal slider from button data attributes (clears or builds slider)
+ const durationMs = parseInt(button.getAttribute("data-length-of-capture-ms"), 10);
+ const fileCadenceMs = parseInt(button.getAttribute("data-file-cadence-ms"), 10);
+ if (typeof window.initCaptureDownloadSlider === "function") {
+ window.initCaptureDownloadSlider(
+ Number.isNaN(durationMs) ? 0 : durationMs,
+ Number.isNaN(fileCadenceMs) ? 1000 : fileCadenceMs,
+ );
+ }
+
+ // Show the modal
+ window.showWebDownloadModal(captureUuid, captureName);
}
/**
diff --git a/gateway/sds_gateway/static/js/file-list.js b/gateway/sds_gateway/static/js/file-list.js
index aba3069c9..609c19f17 100644
--- a/gateway/sds_gateway/static/js/file-list.js
+++ b/gateway/sds_gateway/static/js/file-list.js
@@ -711,6 +711,8 @@ class FileListCapturesTableManager extends CapturesTableManager {
centerFrequencyGhz: ComponentUtils.escapeHtml(
capture.center_frequency_ghz || "",
),
+ lengthOfCaptureMs: capture.length_of_capture_ms ?? 0,
+ fileCadenceMs: capture.file_cadence_ms ?? 1000,
};
let typeDisplay = safeData.captureTypeDisplay || safeData.captureType;
@@ -835,7 +837,9 @@ class FileListCapturesTableManager extends CapturesTableManager {
+ data-capture-name="${safeData.name}"
+ data-length-of-capture-ms="${safeData.lengthOfCaptureMs}"
+ data-file-cadence-ms="${safeData.fileCadenceMs}">
Download
diff --git a/gateway/sds_gateway/templates/base.html b/gateway/sds_gateway/templates/base.html
index 2e14c130a..fdebfe804 100644
--- a/gateway/sds_gateway/templates/base.html
+++ b/gateway/sds_gateway/templates/base.html
@@ -19,6 +19,8 @@
href="https://cdn.jsdelivr.net/npm/bootstrap-icons@1.11.3/font/bootstrap-icons.min.css" />
+
{% block css %}
@@ -226,6 +228,7 @@
{# Removed JS that was hiding/showing the body #}
{% endblock inline_javascript %}
+
diff --git a/gateway/sds_gateway/users/views_deprecated.py b/gateway/sds_gateway/users/views_deprecated.py
index c46a96bf6..457c2e3b4 100644
--- a/gateway/sds_gateway/users/views_deprecated.py
+++ b/gateway/sds_gateway/users/views_deprecated.py
@@ -3334,6 +3334,15 @@ def post(
Returns:
A JSON response containing the download status
"""
+ # optional start and end times for temporal filtering
+ start_time = request.POST.get("start_time", None)
+ end_time = request.POST.get("end_time", None)
+
+ if start_time:
+ start_time = int(start_time)
+ if end_time:
+ end_time = int(end_time)
+
# Validate item type
if item_type not in self.ITEM_MODELS:
return JsonResponse(
@@ -3400,6 +3409,8 @@ def post(
str(item.uuid),
str(request.user.id),
item_type,
+ start_time=start_time,
+ end_time=end_time,
)
return JsonResponse(
From cd26a2e1229f74140c4cd333ca1737b5faba5956 Mon Sep 17 00:00:00 2001
From: klpoland
Date: Mon, 23 Feb 2026 09:17:22 -0500
Subject: [PATCH 2/9] fix label updates
---
gateway/sds_gateway/api_methods/tasks.py | 12 ++--
.../users/partials/web_download_modal.html | 55 +++++++++++--------
2 files changed, 40 insertions(+), 27 deletions(-)
diff --git a/gateway/sds_gateway/api_methods/tasks.py b/gateway/sds_gateway/api_methods/tasks.py
index c7dff0b31..7a13f4390 100644
--- a/gateway/sds_gateway/api_methods/tasks.py
+++ b/gateway/sds_gateway/api_methods/tasks.py
@@ -20,13 +20,13 @@
from redis import Redis
from sds_gateway.api_methods.models import Capture
+from sds_gateway.api_methods.models import CaptureType
from sds_gateway.api_methods.models import Dataset
from sds_gateway.api_methods.models import File
from sds_gateway.api_methods.models import ItemType
from sds_gateway.api_methods.models import TemporaryZipFile
from sds_gateway.api_methods.models import ZipFileStatus
from sds_gateway.api_methods.models import user_has_access_to_item
-from sds_gateway.api_methods.models import CaptureType
from sds_gateway.api_methods.utils.disk_utils import DISK_SPACE_BUFFER
from sds_gateway.api_methods.utils.disk_utils import check_disk_space_available
from sds_gateway.api_methods.utils.disk_utils import estimate_disk_size
@@ -1290,11 +1290,13 @@ def _get_item_files(
Returns:
List of files associated with the item
"""
- from sds_gateway.api_methods.helpers.temporal_filtering import get_capture_files_with_temporal_filter
+ from sds_gateway.api_methods.helpers.temporal_filtering import (
+ get_capture_files_with_temporal_filter,
+ )
+ from sds_gateway.api_methods.utils.relationship_utils import get_capture_files
from sds_gateway.api_methods.utils.relationship_utils import (
get_dataset_files_including_captures,
)
- from sds_gateway.api_methods.utils.relationship_utils import get_capture_files
if item_type == ItemType.DATASET:
files_queryset = get_dataset_files_including_captures(
@@ -1316,7 +1318,7 @@ def _get_item_files(
)
else:
if start_time is not None or end_time is not None:
- logger.warning(
+ log.warning(
"Temporal filtering is only supported for DigitalRF captures, "
"ignoring start_time and end_time"
)
@@ -1326,7 +1328,7 @@ def _get_item_files(
include_deleted=False,
)
- logger.info(f"Found {len(files)} files for capture {item.uuid}")
+ log.info(f"Found {len(files)} files for capture {item.uuid}")
return list(files)
log.warning(f"Unknown item type: {item_type}")
diff --git a/gateway/sds_gateway/templates/users/partials/web_download_modal.html b/gateway/sds_gateway/templates/users/partials/web_download_modal.html
index 5275a8972..8fb67d878 100644
--- a/gateway/sds_gateway/templates/users/partials/web_download_modal.html
+++ b/gateway/sds_gateway/templates/users/partials/web_download_modal.html
@@ -38,13 +38,14 @@
You will receive an email with a download link when the file is ready.
{% if item_type == "capture" %}
-
-
Filter capture files to download within a subset of time:
-
-
0:00:00 - 0:00:00
-
-
-
+
+
Filter capture files to download within a subset of time:
+
+
0:00:00.000 - 0:00:00.000
+
0 files
+
+
+
{% endif %}
Cancel
@@ -62,12 +63,16 @@
diff --git a/gateway/sds_gateway/users/tests/test_drf_views.py b/gateway/sds_gateway/users/tests/test_drf_views.py
index ebe29ee65..c3c6c8d65 100644
--- a/gateway/sds_gateway/users/tests/test_drf_views.py
+++ b/gateway/sds_gateway/users/tests/test_drf_views.py
@@ -2,6 +2,7 @@
import json
import uuid
+from unittest.mock import patch
import pytest
from django.conf import settings
@@ -326,60 +327,6 @@ def test_share_with_multiple_users(
assert permissions.filter(shared_with=user_to_share_with).exists()
assert permissions.filter(shared_with=user2).exists()
- def test_unified_download_dataset_success(
- self, client: Client, owner: User, dataset: Dataset
- ) -> None:
- """Test successful download request using the unified download endpoint."""
- client.force_login(owner)
- url = reverse(
- "users:download_item",
- kwargs={"item_type": ItemType.DATASET, "item_uuid": dataset.uuid},
- )
-
- response = client.post(url)
-
- assert response.status_code == status.HTTP_202_ACCEPTED
- result = response.json()
- assert result["success"] is True
- assert "download request accepted" in result["message"].lower()
- assert "task_id" in result
- assert result["item_name"] == dataset.name
- assert result["user_email"] == owner.email
-
- def test_unified_download_dataset_not_owner(
- self, client: Client, user_to_share_with: User, dataset: Dataset
- ) -> None:
- """Test download request when user is not the owner."""
- client.force_login(user_to_share_with)
- url = reverse(
- "users:download_item",
- kwargs={"item_type": ItemType.DATASET, "item_uuid": dataset.uuid},
- )
-
- response = client.post(url)
-
- assert response.status_code == status.HTTP_404_NOT_FOUND
- result = response.json()
- assert result["success"] is False
- assert "not found or access denied" in result["message"].lower()
-
- def test_unified_download_dataset_invalid_type(
- self, client: Client, owner: User, dataset: Dataset
- ) -> None:
- """Test download request with invalid item type."""
- client.force_login(owner)
- url = reverse(
- "users:download_item",
- kwargs={"item_type": "invalid_type", "item_uuid": dataset.uuid},
- )
-
- response = client.post(url)
-
- assert response.status_code == status.HTTP_400_BAD_REQUEST
- result = response.json()
- assert result["success"] is False
- assert "invalid item type" in result["message"].lower()
-
def test_share_with_group_individual_members_already_shared(
self, client: Client, owner: User, user_to_share_with: User, dataset: Dataset
) -> None:
@@ -543,3 +490,186 @@ def test_capture_share_modal_displays_groups_properly(
member_emails = [member["email"] for member in group_entry["members"]]
assert user_to_share_with.email in member_emails
assert user2.email in member_emails
+
+
+@pytest.mark.django_db
+class TestDownloadItemView:
+ """Tests for the DownloadItemView (unified download endpoint)."""
+
+ @pytest.fixture
+ def client(self) -> Client:
+ return Client()
+
+ @pytest.fixture
+ def owner(self) -> User:
+ """Create a user who owns items."""
+ return User.objects.create_user(
+ email="owner@example.com",
+ password=TEST_PASSWORD,
+ name="Owner User",
+ is_approved=True,
+ )
+
+ @pytest.fixture
+ def user_to_share_with(self) -> User:
+ """Create a user to share items with."""
+ return User.objects.create_user(
+ email="share@example.com",
+ password=TEST_PASSWORD,
+ name="Share User",
+ is_approved=True,
+ )
+
+ @pytest.fixture
+ def dataset(self, owner: User) -> Dataset:
+ """Create a dataset owned by the owner."""
+ return Dataset.objects.create(
+ uuid=uuid.uuid4(),
+ name="Test Dataset",
+ owner=owner,
+ description="A test dataset",
+ )
+
+ def test_unified_download_dataset_success(
+ self, client: Client, owner: User, dataset: Dataset
+ ) -> None:
+ """Test successful download request using the unified download endpoint."""
+ client.force_login(owner)
+ url = reverse(
+ "users:download_item",
+ kwargs={"item_type": ItemType.DATASET, "item_uuid": dataset.uuid},
+ )
+
+ response = client.post(url)
+
+ assert response.status_code == status.HTTP_202_ACCEPTED
+ result = response.json()
+ assert result["success"] is True
+ assert "download request accepted" in result["message"].lower()
+ assert "task_id" in result
+ assert result["item_name"] == dataset.name
+ assert result["user_email"] == owner.email
+
+ def test_unified_download_dataset_not_owner(
+ self, client: Client, user_to_share_with: User, dataset: Dataset
+ ) -> None:
+ """Test download request when user is not the owner."""
+ client.force_login(user_to_share_with)
+ url = reverse(
+ "users:download_item",
+ kwargs={"item_type": ItemType.DATASET, "item_uuid": dataset.uuid},
+ )
+
+ response = client.post(url)
+
+ assert response.status_code == status.HTTP_404_NOT_FOUND
+ result = response.json()
+ assert result["success"] is False
+ assert "not found or access denied" in result["message"].lower()
+
+ def test_unified_download_dataset_invalid_type(
+ self, client: Client, owner: User, dataset: Dataset
+ ) -> None:
+ """Test download request with invalid item type."""
+ client.force_login(owner)
+ url = reverse(
+ "users:download_item",
+ kwargs={"item_type": "invalid_type", "item_uuid": dataset.uuid},
+ )
+
+ response = client.post(url)
+
+ assert response.status_code == status.HTTP_400_BAD_REQUEST
+ result = response.json()
+ assert result["success"] is False
+ assert "invalid item type" in result["message"].lower()
+
+ def test_unified_download_capture_with_time_filter_success(
+ self, client: Client, owner: User
+ ) -> None:
+ """Test capture download request with start_time/end_time passes bounds to task."""
+ capture = Capture.objects.create(
+ uuid=uuid.uuid4(),
+ name="Test DRF Capture",
+ owner=owner,
+ capture_type="drf",
+ top_level_dir="/test",
+ index_name="captures-drf",
+ )
+ client.force_login(owner)
+ url = reverse(
+ "users:download_item",
+ kwargs={"item_type": ItemType.CAPTURE, "item_uuid": capture.uuid},
+ )
+ data = {"start_time": "1000", "end_time": "5000"}
+
+ with patch(
+ "sds_gateway.users.views.send_item_files_email"
+ ) as mock_send_task:
+ mock_send_task.delay.return_value = type("Result", (), {"id": "task-1"})()
+ response = client.post(url, data)
+
+ assert response.status_code == status.HTTP_202_ACCEPTED
+ result = response.json()
+ assert result["success"] is True
+ assert "download request accepted" in result["message"].lower()
+ mock_send_task.delay.assert_called_once()
+ call_kwargs = mock_send_task.delay.call_args[1]
+ assert call_kwargs["start_time"] == 1000
+ assert call_kwargs["end_time"] == 5000
+
+ def test_unified_download_capture_without_time_filter(
+ self, client: Client, owner: User
+ ) -> None:
+ """Test capture download without start_time/end_time passes None to task."""
+ capture = Capture.objects.create(
+ uuid=uuid.uuid4(),
+ name="Test DRF Capture",
+ owner=owner,
+ capture_type="drf",
+ top_level_dir="/test",
+ index_name="captures-drf",
+ )
+ client.force_login(owner)
+ url = reverse(
+ "users:download_item",
+ kwargs={"item_type": ItemType.CAPTURE, "item_uuid": capture.uuid},
+ )
+
+ with patch(
+ "sds_gateway.users.views.send_item_files_email"
+ ) as mock_send_task:
+ mock_send_task.delay.return_value = type("Result", (), {"id": "task-1"})()
+ response = client.post(url)
+
+ assert response.status_code == status.HTTP_202_ACCEPTED
+ mock_send_task.delay.assert_called_once()
+ call_kwargs = mock_send_task.delay.call_args[1]
+ assert call_kwargs.get("start_time") is None
+ assert call_kwargs.get("end_time") is None
+
+ def test_unified_download_capture_invalid_time_range(
+ self, client: Client, owner: User
+ ) -> None:
+ """Test capture download with start_time >= end_time returns 400."""
+ capture = Capture.objects.create(
+ uuid=uuid.uuid4(),
+ name="Test DRF Capture",
+ owner=owner,
+ capture_type="drf",
+ top_level_dir="/test",
+ index_name="captures-drf",
+ )
+ client.force_login(owner)
+ url = reverse(
+ "users:download_item",
+ kwargs={"item_type": ItemType.CAPTURE, "item_uuid": capture.uuid},
+ )
+ data = {"start_time": "5000", "end_time": "1000"}
+
+ response = client.post(url, data)
+
+ assert response.status_code == status.HTTP_400_BAD_REQUEST
+ result = response.json()
+ assert result["success"] is False
+ assert "start_time" in result["message"].lower() or "time range" in result["message"].lower()
diff --git a/gateway/sds_gateway/users/views_deprecated.py b/gateway/sds_gateway/users/views_deprecated.py
index ed643bd4f..552fb75a4 100644
--- a/gateway/sds_gateway/users/views_deprecated.py
+++ b/gateway/sds_gateway/users/views_deprecated.py
@@ -3301,6 +3301,35 @@ def _serve_file_download(self, zip_uuid: str, user) -> HttpResponse:
user_temporary_zip_download_view = TemporaryZipDownloadView.as_view()
+def _parse_optional_time(raw_value: str | None, param_name: str) -> tuple[int | None, JsonResponse | None]:
+ """Parse optional start/end time. Returns (value, None) or (None, error_response)."""
+ if raw_value in (None, ""):
+ return None, None
+ try:
+ value = int(raw_value)
+ except (TypeError, ValueError):
+ return None, JsonResponse(
+ {"success": False, "message": f"Invalid {param_name}; it must be an integer value."},
+ status=400,
+ )
+ if value < 0:
+ return None, JsonResponse(
+ {"success": False, "message": f"Invalid {param_name}; it must be greater than or equal to 0."},
+ status=400,
+ )
+ return value, None
+
+
+def _validate_time_range(start_time: int | None, end_time: int | None) -> JsonResponse | None:
+ """Return 400 JsonResponse if both provided and start >= end; else None."""
+ if start_time is not None and end_time is not None and start_time >= end_time:
+ return JsonResponse(
+ {"success": False, "message": "Invalid time range; start_time must be less than end_time."},
+ status=400,
+ )
+ return None
+
+
class DownloadItemView(Auth0LoginRequiredMixin, View):
"""
Unified view to handle item download requests for both datasets and captures.
@@ -3334,13 +3363,19 @@ def post(
Returns:
A JSON response containing the download status
"""
- # optional start and end times for temporal filtering
- start_time = request.POST.get("start_time") or None
- end_time = request.POST.get("end_time") or None
- if start_time is not None:
- start_time = int(start_time)
- if end_time is not None:
- end_time = int(end_time)
+ # Optional start and end times for temporal filtering
+ raw_start_time = request.POST.get("start_time")
+ raw_end_time = request.POST.get("end_time")
+
+ start_time, err = _parse_optional_time(raw_start_time, "start_time")
+ if err is not None:
+ return err
+ end_time, err = _parse_optional_time(raw_end_time, "end_time")
+ if err is not None:
+ return err
+ err = _validate_time_range(start_time, end_time)
+ if err is not None:
+ return err
# Validate item type
if item_type not in self.ITEM_MODELS:
From a414330e76a98195518693c41bf0859eabdac483 Mon Sep 17 00:00:00 2001
From: klpoland
Date: Fri, 6 Mar 2026 14:19:12 -0500
Subject: [PATCH 6/9] add flatpickr to handle datetime selection for better
control
---
.../js/actions/DownloadActionManager.js | 96 +++++++++++++------
gateway/sds_gateway/static/js/file-list.js | 18 ++++
gateway/sds_gateway/templates/base.html | 3 +
.../templates/users/file_list.html | 6 +-
.../users/partials/web_download_modal.html | 6 +-
5 files changed, 93 insertions(+), 36 deletions(-)
diff --git a/gateway/sds_gateway/static/js/actions/DownloadActionManager.js b/gateway/sds_gateway/static/js/actions/DownloadActionManager.js
index cb2da7332..5b23012a6 100644
--- a/gateway/sds_gateway/static/js/actions/DownloadActionManager.js
+++ b/gateway/sds_gateway/static/js/actions/DownloadActionManager.js
@@ -49,35 +49,32 @@ function formatUtcRange(startEpochSec, startMs, endMs) {
return fmt(startDate) + " - " + fmt(endDate) + " (UTC)";
}
-/** Format ms from capture start as datetime-local value (local time). */
-function msToDatetimeLocal(captureStartEpochSec, ms) {
+/** Format ms from capture start as UTC string for display (Y-m-d H:i:s). */
+function msToUtcString(captureStartEpochSec, ms) {
if (!Number.isFinite(captureStartEpochSec) || !Number.isFinite(ms)) return "";
const d = new Date(captureStartEpochSec * 1000 + ms);
const pad2 = (x) => String(x).padStart(2, "0");
- const pad3 = (x) => String(x).padStart(3, "0");
return (
- d.getFullYear() +
+ d.getUTCFullYear() +
"-" +
- pad2(d.getMonth() + 1) +
+ pad2(d.getUTCMonth() + 1) +
"-" +
- pad2(d.getDate()) +
- "T" +
- pad2(d.getHours()) +
+ pad2(d.getUTCDate()) +
+ " " +
+ pad2(d.getUTCHours()) +
":" +
- pad2(d.getMinutes()) +
+ pad2(d.getUTCMinutes()) +
":" +
- pad2(d.getSeconds()) +
- "." +
- pad3(d.getMilliseconds())
+ pad2(d.getUTCSeconds())
);
}
-/** Parse datetime-local value to ms from capture start (UTC epoch sec). */
-function datetimeLocalToMs(captureStartEpochSec, valueStr) {
- if (!Number.isFinite(captureStartEpochSec) || !valueStr || !valueStr.trim()) return NaN;
- const d = new Date(valueStr.trim());
- if (Number.isNaN(d.getTime())) return NaN;
- return d.getTime() - captureStartEpochSec * 1000;
+/** Parse UTC date string (Y-m-d H:i:s or Y-m-d H:i) to epoch ms. */
+function parseUtcStringToEpochMs(str) {
+ if (!str || !str.trim()) return NaN;
+ const s = str.trim();
+ const d = new Date(s.endsWith("Z") ? s : s.replace(" ", "T") + "Z");
+ return Number.isFinite(d.getTime()) ? d.getTime() : NaN;
}
class DownloadActionManager {
@@ -411,6 +408,33 @@ class DownloadActionManager {
endDateTimeEntry.disabled = !hasEpoch;
}
if (durationMs <= 0) return;
+ var fpStart = null, fpEnd = null;
+ var epochStart = captureStartEpochSec * 1000;
+ var epochEnd = epochStart + durationMs;
+ if (hasEpoch && typeof flatpickr !== 'undefined' && startDateTimeEntry && endDateTimeEntry) {
+ var fpOpts = {
+ enableTime: true,
+ enableSeconds: true,
+ utc: true,
+ dateFormat: 'Y-m-d H:i:S',
+ time_24hr: true,
+ minDate: epochStart,
+ maxDate: epochEnd,
+ allowInput: true,
+ static: true,
+ appendTo: webDownloadModal || undefined,
+ };
+ flatpickr(startDateTimeEntry, Object.assign({}, fpOpts, {
+ onChange: function() { syncFromDateTimeEntries(); }
+ }));
+ flatpickr(endDateTimeEntry, Object.assign({}, fpOpts, {
+ onChange: function() { syncFromDateTimeEntries(); }
+ }));
+ fpStart = startDateTimeEntry._flatpickr;
+ fpEnd = endDateTimeEntry._flatpickr;
+ startDateTimeEntry.disabled = false;
+ endDateTimeEntry.disabled = false;
+ }
noUiSlider.create(sliderEl, {
start: [0, durationMs],
connect: true,
@@ -444,8 +468,10 @@ class DownloadActionManager {
if (startTimeEntry) startTimeEntry.value = String(Math.round(startMs));
if (endTimeEntry) endTimeEntry.value = String(Math.round(endMs));
if (hasEpoch) {
- if (startDateTimeEntry) startDateTimeEntry.value = msToDatetimeLocal(captureStartEpochSec, startMs);
- if (endDateTimeEntry) endDateTimeEntry.value = msToDatetimeLocal(captureStartEpochSec, endMs);
+ if (fpStart && typeof fpStart.setDate === 'function') fpStart.setDate(epochStart + startMs);
+ else if (startDateTimeEntry) startDateTimeEntry.value = msToUtcString(captureStartEpochSec, startMs);
+ if (fpEnd && typeof fpEnd.setDate === 'function') fpEnd.setDate(epochStart + endMs);
+ else if (endDateTimeEntry) endDateTimeEntry.value = msToUtcString(captureStartEpochSec, endMs);
}
});
if (rangeLabel) {
@@ -471,10 +497,11 @@ class DownloadActionManager {
if (startTimeEntry) startTimeEntry.value = startVal;
if (endTimeEntry) endTimeEntry.value = endVal;
if (hasEpoch && startDateTimeEntry && endDateTimeEntry) {
- startDateTimeEntry.value = msToDatetimeLocal(captureStartEpochSec, 0);
- endDateTimeEntry.value = msToDatetimeLocal(captureStartEpochSec, durationMs);
- startDateTimeEntry.disabled = false;
- endDateTimeEntry.disabled = false;
+ if (fpStart && typeof fpStart.setDate === 'function') fpStart.setDate(epochStart);
+ else startDateTimeEntry.value = msToUtcString(captureStartEpochSec, 0);
+ if (fpEnd && typeof fpEnd.setDate === 'function') fpEnd.setDate(epochEnd);
+ else endDateTimeEntry.value = msToUtcString(captureStartEpochSec, durationMs);
+ if (!fpStart) { startDateTimeEntry.disabled = false; endDateTimeEntry.disabled = false; }
}
function syncSliderFromEntries() {
@@ -492,18 +519,28 @@ class DownloadActionManager {
}
function syncFromDateTimeEntries() {
if (!hasEpoch || !sliderEl.noUiSlider || !startDateTimeEntry || !endDateTimeEntry) return;
- var startMs = datetimeLocalToMs(captureStartEpochSec, startDateTimeEntry.value);
- var endMs = datetimeLocalToMs(captureStartEpochSec, endDateTimeEntry.value);
+ var startMs, endMs;
+ if (startDateTimeEntry._flatpickr && endDateTimeEntry._flatpickr) {
+ var dStart = startDateTimeEntry._flatpickr.selectedDates[0];
+ var dEnd = endDateTimeEntry._flatpickr.selectedDates[0];
+ startMs = dStart ? dStart.getTime() - epochStart : 0;
+ endMs = dEnd ? dEnd.getTime() - epochStart : durationMs;
+ } else {
+ startMs = parseUtcStringToEpochMs(startDateTimeEntry.value) - epochStart;
+ endMs = parseUtcStringToEpochMs(endDateTimeEntry.value) - epochStart;
+ }
if (Number.isNaN(startMs) || Number.isNaN(endMs)) return;
startMs = Math.max(0, Math.min(startMs, durationMs));
endMs = Math.max(0, Math.min(endMs, durationMs));
if (startMs >= endMs) endMs = Math.min(startMs + fileCadenceMs, durationMs);
+ var cur = sliderEl.noUiSlider.get();
+ if (Math.round(Number(cur[0])) === Math.round(startMs) && Math.round(Number(cur[1])) === Math.round(endMs)) return;
sliderEl.noUiSlider.set([startMs, endMs]);
}
if (startTimeEntry) startTimeEntry.addEventListener('change', syncSliderFromEntries);
if (endTimeEntry) endTimeEntry.addEventListener('change', syncSliderFromEntries);
- if (startDateTimeEntry) startDateTimeEntry.addEventListener('change', syncFromDateTimeEntries);
- if (endDateTimeEntry) endDateTimeEntry.addEventListener('change', syncFromDateTimeEntries);
+ if (startDateTimeEntry && !startDateTimeEntry._flatpickr) startDateTimeEntry.addEventListener('change', syncFromDateTimeEntries);
+ if (endDateTimeEntry && !endDateTimeEntry._flatpickr) endDateTimeEntry.addEventListener('change', syncFromDateTimeEntries);
}
/**
@@ -653,7 +690,6 @@ class DownloadActionManager {
const dataFilesTotalSizeRaw = button.getAttribute("data-data-files-total-size");
const dataFilesTotalSize = dataFilesTotalSizeRaw !== null && dataFilesTotalSizeRaw !== '' ? parseInt(dataFilesTotalSizeRaw, 10) : NaN;
const captureStartEpochSec = parseInt(button.getAttribute("data-capture-start-epoch-sec"), 10);
- const captureUuid = button.getAttribute("data-capture-uuid") || undefined;
this.initializeCaptureDownloadSlider(
Number.isNaN(durationMs) ? 0 : durationMs,
Number.isNaN(fileCadenceMs) ? 1000 : fileCadenceMs,
@@ -663,7 +699,7 @@ class DownloadActionManager {
dataFilesCount: Number.isNaN(dataFilesCount) ? 0 : dataFilesCount,
totalFilesCount: Number.isNaN(totalFilesCount) ? 0 : totalFilesCount,
dataFilesTotalSize: Number.isNaN(dataFilesTotalSize) ? undefined : dataFilesTotalSize,
- captureUuid: captureUuid,
+ captureUuid: captureUuid || undefined,
captureStartEpochSec: Number.isNaN(captureStartEpochSec) ? undefined : captureStartEpochSec,
},
);
diff --git a/gateway/sds_gateway/static/js/file-list.js b/gateway/sds_gateway/static/js/file-list.js
index 756526285..3f5175077 100644
--- a/gateway/sds_gateway/static/js/file-list.js
+++ b/gateway/sds_gateway/static/js/file-list.js
@@ -575,6 +575,24 @@ class FileListCapturesTableManager extends CapturesTableManager {
this.searchButtonLoading = document.getElementById("search-btn-loading");
}
+ /**
+ * Use web download modal (with temporal slider) when DownloadActionManager is available.
+ */
+ handleDownloadCapture(button) {
+ if (window.currentDownloadManager && document.getElementById("webDownloadModal")) {
+ const captureUuid = button.getAttribute("data-capture-uuid");
+ const captureName = button.getAttribute("data-capture-name") || captureUuid;
+ if (captureUuid) {
+ window.currentDownloadManager.handleCaptureDownload(
+ captureUuid,
+ captureName,
+ button,
+ );
+ }
+ return;
+ }
+ }
+
/**
* Override showLoading to toggle button contents instead of showing separate indicator
*/
diff --git a/gateway/sds_gateway/templates/base.html b/gateway/sds_gateway/templates/base.html
index fdebfe804..b121a037d 100644
--- a/gateway/sds_gateway/templates/base.html
+++ b/gateway/sds_gateway/templates/base.html
@@ -21,6 +21,8 @@
href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0/css/all.min.css" />
+
{% block css %}
@@ -44,6 +46,7 @@
+
{% endblock javascript %}
{# djlint:off H021 #}
diff --git a/gateway/sds_gateway/templates/users/file_list.html b/gateway/sds_gateway/templates/users/file_list.html
index 2e03a166e..d3d9a77c0 100644
--- a/gateway/sds_gateway/templates/users/file_list.html
+++ b/gateway/sds_gateway/templates/users/file_list.html
@@ -527,11 +527,11 @@ Upload Result
}
};
- // Initialize managers for captures
- const permissionsManager = new PermissionsManager(pageConfig.permissions);
+ // Initialize managers for captures (use window.* — classes are attached by module/scripts)
+ const permissionsManager = new window.PermissionsManager(pageConfig.permissions);
// Initialize download manager
- const downloadManager = new DownloadActionManager({
+ const downloadManager = new window.DownloadActionManager({
permissions: permissionsManager
});
diff --git a/gateway/sds_gateway/templates/users/partials/web_download_modal.html b/gateway/sds_gateway/templates/users/partials/web_download_modal.html
index 0d0341f9c..8003acd2d 100644
--- a/gateway/sds_gateway/templates/users/partials/web_download_modal.html
+++ b/gateway/sds_gateway/templates/users/partials/web_download_modal.html
@@ -4,7 +4,7 @@
tabindex="-1"
aria-labelledby="webDownloadModalLabel-{{ item.uuid }}"
aria-hidden="true">
-
+
@@ -1703,7 +1539,7 @@ class ModalManager {
// Primary file information - most useful for users
if (file.size) {
metadata.push(
- `Size: ${ComponentUtils.formatFileSize(file.size)} (${file.size.toLocaleString()} bytes)`,
+ `Size: ${window.DOMUtils.formatFileSize(file.size)} (${file.size.toLocaleString()} bytes)`,
);
}
diff --git a/gateway/sds_gateway/static/js/core/DOMUtils.js b/gateway/sds_gateway/static/js/core/DOMUtils.js
index 902196232..3309538f5 100644
--- a/gateway/sds_gateway/static/js/core/DOMUtils.js
+++ b/gateway/sds_gateway/static/js/core/DOMUtils.js
@@ -18,11 +18,17 @@ class DOMUtils {
* @returns {string} Formatted file size
*/
formatFileSize(bytes) {
- if (bytes === 0) return "0 Bytes";
- const k = 1024;
- const sizes = ["Bytes", "KB", "MB", "GB", "TB"];
- const i = Math.floor(Math.log(bytes) / Math.log(k));
- return `${Number.parseFloat((bytes / k ** i).toFixed(2))} ${sizes[i]}`;
+ const n = Number(bytes);
+ if (!Number.isFinite(n) || n < 0) return "0 bytes";
+ if (n === 0) return "0 bytes";
+ const units = ["bytes", "KB", "MB", "GB"];
+ let i = 0;
+ let v = n;
+ while (v >= 1024 && i < units.length - 1) {
+ v /= 1024;
+ i++;
+ }
+ return (i === 0 ? v : v.toFixed(2)) + " " + units[i];
}
/**
diff --git a/gateway/sds_gateway/static/js/core/PageLifecycleManager.js b/gateway/sds_gateway/static/js/core/PageLifecycleManager.js
index cc6e03c0b..e3fa07f1d 100644
--- a/gateway/sds_gateway/static/js/core/PageLifecycleManager.js
+++ b/gateway/sds_gateway/static/js/core/PageLifecycleManager.js
@@ -82,6 +82,9 @@ class PageLifecycleManager {
case "capture-list":
this.initializeCaptureListPage();
break;
+ case "published-datasets-list":
+ this.initializePublishedDatasetsListPage();
+ break;
default:
console.warn(`Unknown page type: ${this.pageType}`);
}
@@ -165,6 +168,32 @@ class PageLifecycleManager {
this.initializeCaptureModals();
}
+ /**
+ * Published datasets search page: pagination + dataset modals (same modal wiring as dataset list, no sort UI).
+ */
+ initializePublishedDatasetsListPage() {
+ this.initializePagination();
+ this.initializeDatasetModals();
+ }
+
+
+ /**
+ * Single DownloadActionManager for document-wide .web-download-btn / SDK buttons (not per modal).
+ */
+ ensureDownloadActionManager() {
+ if (
+ this.downloadActionManager ||
+ !this.permissions ||
+ !window.DownloadActionManager
+ ) {
+ return;
+ }
+ this.downloadActionManager = new window.DownloadActionManager({
+ permissions: this.permissions,
+ });
+ this.managers.push(this.downloadActionManager);
+ }
+
/**
* Initialize search handlers
*/
@@ -269,6 +298,10 @@ class PageLifecycleManager {
* Initialize dataset modals
*/
initializeDatasetModals() {
+ // TODO: Refactor this to align all modal initialization
+ // with a single manager instance per modal type.
+ // Plan to do this on a future PR.
+
// Pre-initialize all modals on the page with proper config to prevent Bootstrap auto-initialization errors
const allModals = document.querySelectorAll(".modal");
for (const modal of allModals) {
@@ -299,6 +332,7 @@ class PageLifecycleManager {
for (const modal of datasetModals) {
const itemUuid = modal.getAttribute("data-item-uuid");
+ const itemType = modal.getAttribute("data-item-type");
if (!itemUuid || !this.permissions) {
console.warn(
@@ -309,9 +343,9 @@ class PageLifecycleManager {
if (window.ShareActionManager) {
const shareManager = new window.ShareActionManager({
- itemUuid: itemUuid,
- itemType: "dataset",
permissions: this.permissions,
+ itemUuid: itemUuid,
+ itemType: itemType,
});
this.managers.push(shareManager);
@@ -321,28 +355,18 @@ class PageLifecycleManager {
if (window.VersioningActionManager && !modal.versioningActionManager) {
const versioningManager = new window.VersioningActionManager({
- datasetUuid: itemUuid,
permissions: this.permissions,
+ datasetUuid: itemUuid,
});
this.managers.push(versioningManager);
modal.versioningActionManager = versioningManager;
}
- if (window.DownloadActionManager) {
- const downloadManager = new window.DownloadActionManager({
- permissions: this.permissions,
- });
- this.managers.push(downloadManager);
-
- // Store reference on modal
- modal.downloadActionManager = downloadManager;
- }
-
if (window.DetailsActionManager) {
const detailsManager = new window.DetailsActionManager({
permissions: this.permissions,
itemUuid: itemUuid,
- itemType: "dataset",
+ itemType: itemType,
});
this.managers.push(detailsManager);
@@ -350,18 +374,25 @@ class PageLifecycleManager {
modal.detailsActionManager = detailsManager;
}
}
+
+ this.ensureDownloadActionManager();
}
/**
* Initialize capture modals
*/
initializeCaptureModals() {
+ // TODO: Refactor this to align all modal initialization
+ // with a single manager instance per modal type.
+ // Plan to do this on a future PR.
+
const captureModals = document.querySelectorAll(
".modal[data-item-type='capture']",
);
for (const modal of captureModals) {
const itemUuid = modal.getAttribute("data-item-uuid");
+ const itemType = modal.getAttribute("data-item-type");
if (!itemUuid || !this.permissions) {
console.warn(
@@ -372,28 +403,18 @@ class PageLifecycleManager {
if (window.ShareActionManager) {
const shareManager = new window.ShareActionManager({
- itemUuid: itemUuid,
- itemType: "capture",
permissions: this.permissions,
+ itemUuid: itemUuid,
+ itemType: itemType,
});
this.managers.push(shareManager);
// Store reference on modal
modal.shareActionManager = shareManager;
}
-
- if (window.DownloadActionManager) {
- const downloadManager = new window.DownloadActionManager({
- itemUuid: itemUuid,
- itemType: "capture",
- permissions: this.permissions,
- });
- this.managers.push(downloadManager);
-
- // Store reference on modal
- modal.downloadActionManager = downloadManager;
- }
}
+
+ this.ensureDownloadActionManager();
}
/**
diff --git a/gateway/sds_gateway/static/js/file-list.js b/gateway/sds_gateway/static/js/file-list.js
index 3f5175077..6c660c27d 100644
--- a/gateway/sds_gateway/static/js/file-list.js
+++ b/gateway/sds_gateway/static/js/file-list.js
@@ -1,3 +1,9 @@
+/**
+ * TODO: This file has a lot of redundancy with manager files
+ * And needs to be deprecated. and have its functionality migrated
+ * to the new JS structure.
+ */
+
/* File List Page JavaScript - Refactored to use Components */
/**
@@ -575,24 +581,6 @@ class FileListCapturesTableManager extends CapturesTableManager {
this.searchButtonLoading = document.getElementById("search-btn-loading");
}
- /**
- * Use web download modal (with temporal slider) when DownloadActionManager is available.
- */
- handleDownloadCapture(button) {
- if (window.currentDownloadManager && document.getElementById("webDownloadModal")) {
- const captureUuid = button.getAttribute("data-capture-uuid");
- const captureName = button.getAttribute("data-capture-name") || captureUuid;
- if (captureUuid) {
- window.currentDownloadManager.handleCaptureDownload(
- captureUuid,
- captureName,
- button,
- );
- }
- return;
- }
- }
-
/**
* Override showLoading to toggle button contents instead of showing separate indicator
*/
diff --git a/gateway/sds_gateway/templates/users/components/dataset_list_table.html b/gateway/sds_gateway/templates/users/components/dataset_list_table.html
index ad45ff497..2257a7244 100644
--- a/gateway/sds_gateway/templates/users/components/dataset_list_table.html
+++ b/gateway/sds_gateway/templates/users/components/dataset_list_table.html
@@ -183,8 +183,8 @@
No datasets yet
+ data-item-uuid="{{ dataset.uuid }}"
+ data-item-type="dataset">
Web Download
diff --git a/gateway/sds_gateway/templates/users/dataset_list.html b/gateway/sds_gateway/templates/users/dataset_list.html
index baf0de549..4ead29d05 100644
--- a/gateway/sds_gateway/templates/users/dataset_list.html
+++ b/gateway/sds_gateway/templates/users/dataset_list.html
@@ -31,20 +31,7 @@ Datasets
-
- {% for dataset in page_obj %}
- {% include "users/partials/dataset_details_modal.html" with dataset=dataset %}
- {% include "users/partials/web_download_modal.html" with item=dataset item_type="dataset" %}
- {% include "users/partials/share_modal.html" with item=dataset item_type="dataset" %}
- {% include "users/partials/sdk_download_modal.html" with dataset=dataset %}
- {% if dataset.is_owner or dataset.permission_level == 'co-owner' %}
- {% include "users/partials/dataset_version_control.html" with dataset=dataset %}
- {% if not dataset.dataset.status == 'final' or not dataset.is_public %}
- {% include "users/partials/publish_dataset_modal.html" with dataset=dataset %}
- {% endif %}
- {% endif %}
- {% endfor %}
-
+ {% include "users/components/dataset_list_modals.html" with page_obj=page_obj %}
{% endblock content %}
{% block javascript %}
{# djlint:off #}
diff --git a/gateway/sds_gateway/templates/users/file_list.html b/gateway/sds_gateway/templates/users/file_list.html
index d3d9a77c0..6c6a830cb 100644
--- a/gateway/sds_gateway/templates/users/file_list.html
+++ b/gateway/sds_gateway/templates/users/file_list.html
@@ -369,8 +369,6 @@
Upload Result
{% include "users/partials/capture_modal.html" %}
-
- {% include "users/partials/web_download_modal.html" with item_type="capture" capture=capture_obj %}
{% if VISUALIZATIONS_ENABLED %}
{% include "visualizations/partials/visualization_modal.html" with visualization_compatibility=visualization_compatibility %}
diff --git a/gateway/sds_gateway/templates/users/files.html b/gateway/sds_gateway/templates/users/files.html
index 63e84d774..9cc93386a 100644
--- a/gateway/sds_gateway/templates/users/files.html
+++ b/gateway/sds_gateway/templates/users/files.html
@@ -178,12 +178,19 @@ Files
{% endif %}
-
- Download
-
+ Download
{% elif item.type == 'file' %}
diff --git a/gateway/sds_gateway/templates/users/partials/captures_page_table.html b/gateway/sds_gateway/templates/users/partials/captures_page_table.html
index 088b8c369..9aa0b6a30 100644
--- a/gateway/sds_gateway/templates/users/partials/captures_page_table.html
+++ b/gateway/sds_gateway/templates/users/partials/captures_page_table.html
@@ -137,15 +137,16 @@
{% endif %}
- Download
From 1c13bddf33798ffe0aee9fe7c23fcb7349d31683 Mon Sep 17 00:00:00 2001
From: klpoland
Date: Wed, 8 Apr 2026 14:24:04 -0400
Subject: [PATCH 8/9] pre-commit fixes
---
.../api_methods/helpers/temporal_filtering.py | 35 +-
gateway/sds_gateway/api_methods/models.py | 13 +-
.../serializers/capture_serializers.py | 57 ++-
.../api_methods/tests/factories.py | 32 +-
.../api_methods/tests/test_celery_tasks.py | 12 +-
.../tests/test_temporal_filtering.py | 43 +-
.../js/actions/DownloadActionManager.js | 375 +++++++++++-------
gateway/sds_gateway/static/js/components.js | 1 -
.../sds_gateway/static/js/core/DOMUtils.js | 4 +-
.../static/js/core/PageLifecycleManager.js | 1 -
.../sds_gateway/templates/users/files.html | 25 +-
.../users/partials/captures_page_table.html | 4 +-
.../users/partials/web_download_modal.html | 52 ++-
.../sds_gateway/users/tests/test_drf_views.py | 23 +-
gateway/sds_gateway/users/views_deprecated.py | 31 +-
15 files changed, 441 insertions(+), 267 deletions(-)
diff --git a/gateway/sds_gateway/api_methods/helpers/temporal_filtering.py b/gateway/sds_gateway/api_methods/helpers/temporal_filtering.py
index 787174fda..bb1a43f9a 100644
--- a/gateway/sds_gateway/api_methods/helpers/temporal_filtering.py
+++ b/gateway/sds_gateway/api_methods/helpers/temporal_filtering.py
@@ -1,12 +1,15 @@
import re
from django.db.models import QuerySet
-
+from loguru import logger as log
from opensearchpy.exceptions import NotFoundError as OpenSearchNotFoundError
-from sds_gateway.api_methods.models import CaptureType, Capture, File, DRF_RF_FILENAME_REGEX_STR
+
+from sds_gateway.api_methods.models import DRF_RF_FILENAME_REGEX_STR
+from sds_gateway.api_methods.models import Capture
+from sds_gateway.api_methods.models import CaptureType
+from sds_gateway.api_methods.models import File
from sds_gateway.api_methods.utils.opensearch_client import get_opensearch_client
from sds_gateway.api_methods.utils.relationship_utils import get_capture_files
-from loguru import logger as log
# Digital RF spec: rf@SECONDS.MILLISECONDS.h5 (e.g. rf@1396379502.000.h5)
# https://github.com/MITHaystack/digital_rf
@@ -55,14 +58,12 @@ def get_capture_bounds(capture_type: CaptureType, capture_uuid: str) -> tuple[in
try:
response = client.get(index=index, id=capture_uuid)
except OpenSearchNotFoundError as e:
- raise ValueError(
- f"Capture {capture_uuid} not found in OpenSearch index {index}"
- ) from e
+ msg = f"Capture {capture_uuid} not found in OpenSearch index {index}"
+ raise ValueError(msg) from e
if not response.get("found"):
- raise ValueError(
- f"Capture {capture_uuid} not found in OpenSearch index {index}"
- )
+ msg = f"Capture {capture_uuid} not found in OpenSearch index {index}"
+ raise ValueError(msg)
source = response.get("_source", {})
search_props = source.get("search_props", {})
@@ -90,7 +91,7 @@ def filter_capture_data_files_selection_bounds(
capture_type: CaptureType,
capture: Capture,
start_time: int, # relative ms from start of capture (from UI)
- end_time: int, # relative ms from start of capture (from UI)
+ end_time: int, # relative ms from start of capture (from UI)
) -> QuerySet[File]:
"""Filter the capture file selection bounds to the given start and end times."""
_catch_capture_type_error(capture_type)
@@ -108,21 +109,27 @@ def filter_capture_data_files_selection_bounds(
name__lte=end_file_name,
).order_by("name")
+
def get_capture_files_with_temporal_filter(
capture_type: CaptureType,
capture: Capture,
- start_time: int | None = None, # milliseconds since start of capture
+ start_time: int | None = None, # milliseconds since start of capture
end_time: int | None = None,
) -> QuerySet[File]:
"""Get the capture files with temporal filtering."""
_catch_capture_type_error(capture_type)
if start_time is None or end_time is None:
- log.warning("Start or end time is None, returning all capture files without temporal filtering")
+ log.warning(
+ "Start or end time is None; returning all capture files without "
+ "temporal filtering"
+ )
return get_capture_files(capture)
# get non-data files
- non_data_files = get_capture_files(capture).exclude(name__regex=DRF_RF_FILENAME_REGEX_STR)
+ non_data_files = get_capture_files(capture).exclude(
+ name__regex=DRF_RF_FILENAME_REGEX_STR
+ )
# get data files with temporal filtering
data_files = filter_capture_data_files_selection_bounds(
@@ -130,4 +137,4 @@ def get_capture_files_with_temporal_filter(
)
# return all files
- return non_data_files.union(data_files)
\ No newline at end of file
+ return non_data_files.union(data_files)
diff --git a/gateway/sds_gateway/api_methods/models.py b/gateway/sds_gateway/api_methods/models.py
index 8bb007266..45bf6073e 100644
--- a/gateway/sds_gateway/api_methods/models.py
+++ b/gateway/sds_gateway/api_methods/models.py
@@ -13,10 +13,10 @@
from blake3 import blake3 as Blake3 # noqa: N812
from django.conf import settings
from django.db import models
-from django.db.models import Sum
from django.db.models import Count
from django.db.models import ProtectedError
from django.db.models import QuerySet
+from django.db.models import Sum
from django.db.models.signals import post_save
from django.db.models.signals import pre_delete
from django.dispatch import receiver
@@ -423,7 +423,6 @@ def get_capture(self) -> dict[str, Any]:
"owner": self.owner,
}
-
def get_drf_data_files_queryset(self) -> QuerySet[File]:
"""DRF data files (rf@*.h5) for this capture (M2M + FK)."""
if self.capture_type != CaptureType.DigitalRF:
@@ -431,14 +430,20 @@ def get_drf_data_files_queryset(self) -> QuerySet[File]:
return File.objects.none()
# Local import avoids circular import (relationship_utils imports Capture).
- from sds_gateway.api_methods.utils.relationship_utils import get_capture_files
+ from sds_gateway.api_methods.utils.relationship_utils import ( # noqa: PLC0415
+ get_capture_files,
+ )
return get_capture_files(self, include_deleted=False).filter(
name__regex=DRF_RF_FILENAME_REGEX_STR,
)
def get_drf_data_files_stats(self) -> dict[str, int]:
- """Count + total size in one query; cached per instance. File PK is ``uuid`` — use ``pk``."""
+ """
+ Count + total size in one query; cached per instance.
+
+ File primary key is ``uuid``; use ``pk`` in aggregates.
+ """
if hasattr(self, "_drf_data_files_stats_cache"):
return self._drf_data_files_stats_cache
diff --git a/gateway/sds_gateway/api_methods/serializers/capture_serializers.py b/gateway/sds_gateway/api_methods/serializers/capture_serializers.py
index 549c09f72..5d0afec42 100644
--- a/gateway/sds_gateway/api_methods/serializers/capture_serializers.py
+++ b/gateway/sds_gateway/api_methods/serializers/capture_serializers.py
@@ -99,7 +99,7 @@ def get_files(self, capture: Capture) -> ReturnList[File]:
@extend_schema_field(serializers.IntegerField(allow_null=True))
def get_total_file_size(self, capture: Capture) -> int | None:
"""Get the total file size of all files associated with this capture."""
-
+
if capture.capture_type != CaptureType.DigitalRF:
return None
@@ -109,11 +109,16 @@ def get_total_file_size(self, capture: Capture) -> int | None:
data_total = self.get_data_files_info(capture).get("total_size", 0)
if total < data_total:
logging.getLogger(__name__).warning(
- "Capture %s: total_file_size (%s) < data_files_total_size (%s); using data total.",
- str(capture.uuid), total, data_total,
+ (
+ "Capture %s: total_file_size (%s) < data_files_total_size (%s); "
+ "using data total."
+ ),
+ str(capture.uuid),
+ total,
+ data_total,
)
total = data_total
-
+
return total
@extend_schema_field(serializers.DictField(allow_null=True))
@@ -135,17 +140,19 @@ def get_data_files_info(self, capture: Capture) -> dict[str, Any]:
def get_center_frequency_ghz(self, capture: Capture) -> float | None:
"""Get the center frequency in GHz from the capture model property."""
return capture.center_frequency_ghz
-
+
@extend_schema_field(serializers.FloatField(allow_null=True))
def get_sample_rate_mhz(self, capture: Capture) -> float | None:
- """Get the sample rate in MHz from the capture model property. None if not indexed in OpenSearch."""
+ """Sample rate in MHz from the model. None if not indexed in OpenSearch."""
return capture.sample_rate_mhz
@extend_schema_field(serializers.IntegerField(allow_null=True))
def get_length_of_capture_ms(self, capture: Capture) -> int | None:
- """Get the length of the capture in milliseconds. OpenSearch bounds are in seconds."""
+ """Capture length in milliseconds (OpenSearch bounds are seconds)."""
try:
- start_time, end_time = get_capture_bounds(capture.capture_type, str(capture.uuid))
+ start_time, end_time = get_capture_bounds(
+ capture.capture_type, str(capture.uuid)
+ )
return (end_time - start_time) * 1000
except (ValueError, IndexError, KeyError):
return None
@@ -160,13 +167,14 @@ def get_file_cadence_ms(self, capture: Capture) -> int | None:
@extend_schema_field(serializers.IntegerField(allow_null=True))
def get_capture_start_epoch_sec(self, capture: Capture) -> int | None:
- """Get the capture start time as Unix epoch seconds. None if not indexed in OpenSearch."""
+ """Capture start as Unix epoch seconds. None if not in OpenSearch."""
try:
start_time, _ = get_capture_bounds(capture.capture_type, str(capture.uuid))
- return start_time
except (ValueError, IndexError, KeyError):
return None
-
+ else:
+ return start_time
+
@extend_schema_field(serializers.DictField)
def get_capture_props(self, capture: Capture) -> dict[str, Any]:
"""Retrieve the indexed metadata for the capture."""
@@ -382,7 +390,7 @@ def get_total_file_size(self, obj: dict[str, Any]) -> int | None:
"""Get the total file size across all channels."""
if obj["capture_type"] != CaptureType.DigitalRF:
return None
-
+
total_size = 0
for channel_data in obj["channels"]:
capture_uuid = channel_data["uuid"]
@@ -390,13 +398,17 @@ def get_total_file_size(self, obj: dict[str, Any]) -> int | None:
all_files = get_capture_files(capture, include_deleted=False)
result = all_files.aggregate(total_size=Sum("size"))
total_size += result["total_size"] or 0
-
+
data_total = self.get_data_files_info(obj).get("total_size", 0)
-
+
if total_size < data_total:
logging.getLogger(__name__).warning(
- "Composite capture: total_file_size (%s) < data_files_total_size (%s); using data total.",
- total_size, data_total,
+ (
+ "Composite capture: total_file_size (%s) < "
+ "data_files_total_size (%s); using data total."
+ ),
+ total_size,
+ data_total,
)
total_size = data_total
return total_size
@@ -405,7 +417,7 @@ def get_data_files_info(self, obj: dict[str, Any]) -> dict[str, Any]:
"""Get the data files info for the composite capture."""
if obj["capture_type"] != CaptureType.DigitalRF:
return {}
-
+
total_count = 0
total_size = 0
for channel_data in obj["channels"]:
@@ -418,7 +430,9 @@ def get_data_files_info(self, obj: dict[str, Any]) -> dict[str, Any]:
return {
"count": total_count,
"total_size": total_size,
- "per_data_file_size": (float(total_size) / total_count) if total_count else None,
+ "per_data_file_size": (float(total_size) / total_count)
+ if total_count
+ else None,
}
@extend_schema_field(serializers.CharField)
@@ -464,12 +478,11 @@ def get_capture_start_epoch_sec(self, obj: dict[str, Any]) -> int | None:
return None
try:
capture = Capture.objects.get(uuid=channels[0]["uuid"])
- start_time, _ = get_capture_bounds(
- capture.capture_type, str(capture.uuid)
- )
- return start_time
+ start_time, _ = get_capture_bounds(capture.capture_type, str(capture.uuid))
except (ValueError, IndexError, KeyError):
return None
+ else:
+ return start_time
def build_composite_capture_data(captures: list[Capture]) -> dict[str, Any]:
diff --git a/gateway/sds_gateway/api_methods/tests/factories.py b/gateway/sds_gateway/api_methods/tests/factories.py
index d0a00849f..31cab86d2 100644
--- a/gateway/sds_gateway/api_methods/tests/factories.py
+++ b/gateway/sds_gateway/api_methods/tests/factories.py
@@ -13,13 +13,13 @@
from unittest.mock import patch
from django.core.files.base import ContentFile
-from faker import Faker as FakerInstance
from factory import Faker as FactoryFaker
from factory import LazyAttribute
from factory import LazyFunction
-from factory import post_generation
from factory import Sequence
+from factory import post_generation
from factory.django import DjangoModelFactory
+from faker import Faker as FakerInstance
from sds_gateway.api_methods.helpers.temporal_filtering import drf_rf_filename_from_ms
from sds_gateway.api_methods.models import Capture
@@ -30,9 +30,10 @@
from sds_gateway.api_methods.models import UserSharePermission
from sds_gateway.users.tests.factories import UserFactory
-# Standalone Faker for LazyFunction callbacks (not factory_boy's FactoryFaker declaration)
+# Standalone Faker for LazyFunction callbacks (not FactoryFaker from factory_boy)
_faker = FakerInstance()
+
class DatasetFactory(DjangoModelFactory):
"""Factory for creating Dataset instances for testing.
@@ -237,26 +238,23 @@ class Meta:
channel = FactoryFaker("word")
capture_type = "drf"
- top_level_dir = LazyFunction(
- lambda: _faker.file_path(depth=2).replace("/", "_")
- )
+ top_level_dir = LazyFunction(lambda: _faker.file_path(depth=2).replace("/", "_"))
owner = FactoryFaker("subfactory", factory=UserFactory)
name = FactoryFaker("slug")
index_name = "captures-drf"
class DRFDataFileFactory(DjangoModelFactory):
- """Factory for creating DRF data file instances for testing.
-
- This factory creates realistic DRF data file objects that represent files stored in the system.
- It generates test data for file metadata and creates a Django ContentFile for the actual file content.
-
- The factory creates files with realistic metadata including size, checksums, and proper file extensions.
- It also handles the creation of the Django file field with test content.
+ """Factory for DRF data file instances used in tests.
+
+ Creates file metadata plus a Django ContentFile for content. Includes
+ checksums, sizes, and extensions; wires the file field for uploads.
"""
uuid = FactoryFaker("uuid4")
- directory = LazyAttribute(lambda obj: f"/files/{obj.owner.email}/{obj.capture.top_level_dir}/")
+ directory = LazyAttribute(
+ lambda obj: f"/files/{obj.owner.email}/{obj.capture.top_level_dir}/"
+ )
name = Sequence(lambda n: drf_rf_filename_from_ms(1000 + n * 1000))
media_type = "application/x-hdf5"
permissions = "rw-r----"
@@ -275,8 +273,6 @@ def file(self, create, extracted, **kwargs):
else:
content = b"test drf file content"
self.file = ContentFile(content, name=self.name)
-
-
class Meta:
model = File
@@ -316,7 +312,9 @@ class UserSharePermissionFactory(DjangoModelFactory):
owner = FactoryFaker("subfactory", factory=UserFactory)
shared_with = FactoryFaker("subfactory", factory=UserFactory)
- item_type = FactoryFaker("random_element", elements=[ItemType.DATASET, ItemType.CAPTURE])
+ item_type = FactoryFaker(
+ "random_element", elements=[ItemType.DATASET, ItemType.CAPTURE]
+ )
item_uuid = FactoryFaker("uuid4")
is_enabled = True
message = FactoryFaker("sentence", nb_words=5)
diff --git a/gateway/sds_gateway/api_methods/tests/test_celery_tasks.py b/gateway/sds_gateway/api_methods/tests/test_celery_tasks.py
index 2ceff4451..5b1eb056a 100644
--- a/gateway/sds_gateway/api_methods/tests/test_celery_tasks.py
+++ b/gateway/sds_gateway/api_methods/tests/test_celery_tasks.py
@@ -25,6 +25,7 @@
from sds_gateway.api_methods.models import ItemType
from sds_gateway.api_methods.models import TemporaryZipFile
from sds_gateway.api_methods.models import ZipFileStatus
+from sds_gateway.api_methods.tasks import _get_item_files
from sds_gateway.api_methods.tasks import acquire_user_lock
from sds_gateway.api_methods.tasks import check_celery_task
from sds_gateway.api_methods.tasks import check_disk_space_available
@@ -36,7 +37,6 @@
from sds_gateway.api_methods.tasks import get_user_task_status
from sds_gateway.api_methods.tasks import is_user_locked
from sds_gateway.api_methods.tasks import release_user_lock
-from sds_gateway.api_methods.tasks import _get_item_files
from sds_gateway.api_methods.tasks import send_item_files_email
from sds_gateway.api_methods.utils.disk_utils import estimate_disk_size
@@ -1270,7 +1270,7 @@ def test_get_item_files_with_temporal_bounds_returns_expected_rf_subset(self):
"sds_gateway.api_methods.helpers.temporal_filtering.get_opensearch_client"
) as m:
m.return_value.get.return_value = mock_response
- # Relative ms: 1000–4000 from capture start → absolute 2s–5s filenames
+ # Relative ms: 1000-4000 from capture start; absolute 2s-5s filenames
result = _get_item_files(
self.user,
self.capture,
@@ -1279,10 +1279,12 @@ def test_get_item_files_with_temporal_bounds_returns_expected_rf_subset(self):
end_time=4000,
)
names = [f.name for f in result]
- # DRF files in [2s, 5s] inclusive (see filter_capture_data_files_selection_bounds)
+ # DRF files [2s,5s] inclusive; see filter_capture_data_files_selection_bounds
expected_rf = [f"rf@{i}.000.h5" for i in range(2, 6)]
rf_names = sorted(n for n in names if n.startswith("rf@"))
- assert rf_names == expected_rf, f"Expected RF files {expected_rf}, got {rf_names}"
- # Metadata / non-DRF capture files from setUp are still included in the download set
+ assert rf_names == expected_rf, (
+ f"Expected RF files {expected_rf}, got {rf_names}"
+ )
+ # Metadata and non-DRF files from setUp stay in the download set
assert "test_file1.txt" in names
assert "test_file2.txt" in names
diff --git a/gateway/sds_gateway/api_methods/tests/test_temporal_filtering.py b/gateway/sds_gateway/api_methods/tests/test_temporal_filtering.py
index fb5dfea8e..3b47a3be1 100644
--- a/gateway/sds_gateway/api_methods/tests/test_temporal_filtering.py
+++ b/gateway/sds_gateway/api_methods/tests/test_temporal_filtering.py
@@ -1,11 +1,13 @@
import time
-
from unittest.mock import patch
+
from django.db.models import QuerySet
from django.test import TestCase
-import sds_gateway.api_methods.helpers.temporal_filtering as temporal_filtering
-from sds_gateway.api_methods.tests.factories import CaptureFactory, DRFDataFileFactory, UserFactory
+from sds_gateway.api_methods.helpers import temporal_filtering
+from sds_gateway.api_methods.tests.factories import CaptureFactory
+from sds_gateway.api_methods.tests.factories import DRFDataFileFactory
+from sds_gateway.api_methods.tests.factories import UserFactory
class TemporalFilteringTestCase(TestCase):
@@ -34,14 +36,16 @@ def _get_test_capture_bounds(self):
def test_rf_filename_ms_conversion(self):
for i in range(10):
expected_ms = (self.now + i) * 1000
- filename_to_ms = temporal_filtering.drf_rf_filename_to_ms(self.files[i].name)
+ filename_to_ms = temporal_filtering.drf_rf_filename_to_ms(
+ self.files[i].name
+ )
assert filename_to_ms is not None
assert filename_to_ms == expected_ms
ms_to_filename = temporal_filtering.drf_rf_filename_from_ms(expected_ms)
assert ms_to_filename is not None
assert ms_to_filename == self.files[i].name
-
+
def test_get_capture_bounds(self):
start_sec, end_sec = self._get_test_capture_bounds()
# mock response, opensearch calls are tested in test_opensearch.py
@@ -54,7 +58,9 @@ def test_get_capture_bounds(self):
}
},
}
- with patch("sds_gateway.api_methods.helpers.temporal_filtering.get_opensearch_client") as m:
+ with patch(
+ "sds_gateway.api_methods.helpers.temporal_filtering.get_opensearch_client"
+ ) as m:
m.return_value.get.return_value = mock_response
start_time, end_time = temporal_filtering.get_capture_bounds(
self.capture.capture_type, str(self.capture.uuid)
@@ -63,7 +69,7 @@ def test_get_capture_bounds(self):
assert end_time is not None
assert start_time == start_sec
assert end_time == end_sec
-
+
def test_get_file_cadence(self):
start_sec, end_sec = self._get_test_capture_bounds()
# mock response, opensearch calls are tested in test_opensearch.py
@@ -76,18 +82,23 @@ def test_get_file_cadence(self):
}
},
}
- with patch("sds_gateway.api_methods.helpers.temporal_filtering.get_opensearch_client") as m:
+ with patch(
+ "sds_gateway.api_methods.helpers.temporal_filtering.get_opensearch_client"
+ ) as m:
m.return_value.get.return_value = mock_response
file_cadence = temporal_filtering.get_file_cadence(
self.capture.capture_type, self.capture
)
-
+
expected_cadence = max(
1, int((end_sec - start_sec) * 1000 / self.file_count)
)
-
+
# duration_ms / DRF data file count (get_drf_data_files_stats total_count)
- assert self.capture.get_drf_data_files_stats()["total_count"] == self.file_count
+ assert (
+ self.capture.get_drf_data_files_stats()["total_count"]
+ == self.file_count
+ )
assert file_cadence == expected_cadence
def test_file_filtering(self):
@@ -105,10 +116,14 @@ def test_file_filtering(self):
}
},
}
- with patch("sds_gateway.api_methods.helpers.temporal_filtering.get_opensearch_client") as m:
+ with patch(
+ "sds_gateway.api_methods.helpers.temporal_filtering.get_opensearch_client"
+ ) as m:
m.return_value.get.return_value = mock_response
- filtered_files = temporal_filtering.filter_capture_data_files_selection_bounds(
- self.capture.capture_type, self.capture, start_ms, end_ms
+ filtered_files = (
+ temporal_filtering.filter_capture_data_files_selection_bounds(
+ self.capture.capture_type, self.capture, start_ms, end_ms
+ )
)
assert isinstance(filtered_files, QuerySet)
assert filtered_files.count() == expected_count
diff --git a/gateway/sds_gateway/static/js/actions/DownloadActionManager.js b/gateway/sds_gateway/static/js/actions/DownloadActionManager.js
index e2af831a9..d7b692a9c 100644
--- a/gateway/sds_gateway/static/js/actions/DownloadActionManager.js
+++ b/gateway/sds_gateway/static/js/actions/DownloadActionManager.js
@@ -12,7 +12,7 @@ function msToHms(ms) {
const s = totalSec % 60;
const decimalMs = n % 1000;
const hms = [h, m, s].map((v) => String(v).padStart(2, "0")).join(":");
- return hms + "." + String(decimalMs).padStart(3, "0");
+ return `${hms}.${String(decimalMs).padStart(3, "0")}`;
}
function formatUtcRange(startEpochSec, startMs, endMs) {
@@ -21,18 +21,8 @@ function formatUtcRange(startEpochSec, startMs, endMs) {
const endDate = new Date(startEpochSec * 1000 + endMs);
const pad2 = (x) => String(x).padStart(2, "0");
const fmt = (d) =>
- pad2(d.getUTCHours()) +
- ":" +
- pad2(d.getUTCMinutes()) +
- ":" +
- pad2(d.getUTCSeconds()) +
- " " +
- pad2(d.getUTCMonth() + 1) +
- "/" +
- pad2(d.getUTCDate()) +
- "/" +
- d.getUTCFullYear();
- return fmt(startDate) + " - " + fmt(endDate) + " (UTC)";
+ `${pad2(d.getUTCHours())}:${pad2(d.getUTCMinutes())}:${pad2(d.getUTCSeconds())} ${pad2(d.getUTCMonth() + 1)}/${pad2(d.getUTCDate())}/${d.getUTCFullYear()}`;
+ return `${fmt(startDate)} - ${fmt(endDate)} (UTC)`;
}
/** Format ms from capture start as UTC string for display (Y-m-d H:i:s). */
@@ -40,27 +30,15 @@ function msToUtcString(captureStartEpochSec, ms) {
if (!Number.isFinite(captureStartEpochSec) || !Number.isFinite(ms)) return "";
const d = new Date(captureStartEpochSec * 1000 + ms);
const pad2 = (x) => String(x).padStart(2, "0");
- return (
- d.getUTCFullYear() +
- "-" +
- pad2(d.getUTCMonth() + 1) +
- "-" +
- pad2(d.getUTCDate()) +
- " " +
- pad2(d.getUTCHours()) +
- ":" +
- pad2(d.getUTCMinutes()) +
- ":" +
- pad2(d.getUTCSeconds())
- );
+ return `${d.getUTCFullYear()}-${pad2(d.getUTCMonth() + 1)}-${pad2(d.getUTCDate())} ${pad2(d.getUTCHours())}:${pad2(d.getUTCMinutes())}:${pad2(d.getUTCSeconds())}`;
}
/** Parse UTC date string (Y-m-d H:i:s or Y-m-d H:i) to epoch ms. */
function parseUtcStringToEpochMs(str) {
- if (!str || !str.trim()) return NaN;
+ if (!str || !str.trim()) return Number.NaN;
const s = str.trim();
- const d = new Date(s.endsWith("Z") ? s : s.replace(" ", "T") + "Z");
- return Number.isFinite(d.getTime()) ? d.getTime() : NaN;
+ const d = new Date(s.endsWith("Z") ? s : `${s.replace(" ", "T")}Z`);
+ return Number.isFinite(d.getTime()) ? d.getTime() : Number.NaN;
}
class DownloadActionManager {
@@ -119,17 +97,17 @@ class DownloadActionManager {
/**
* Initialize or update the capture download temporal slider. Call before
- * showing the modal when opening for a capture with known bounds.
- * @param {number} durationMs - Total capture duration in milliseconds
- * @param {number} fileCadenceMs - File cadence in milliseconds (step)
- * @param {Object} opts - Optional: { perDataFileSize, totalSize, dataFilesCount, totalFilesCount, dataFilesTotalSize, captureUuid, captureStartEpochSec }
- */
+ * showing the modal when opening for a capture with known bounds.
+ * @param {number} durationMs - Total capture duration in milliseconds
+ * @param {number} fileCadenceMs - File cadence in milliseconds (step)
+ * @param {Object} opts - Optional: { perDataFileSize, totalSize, dataFilesCount, totalFilesCount, dataFilesTotalSize, captureUuid, captureStartEpochSec }
+ */
initializeCaptureDownloadSlider(modalId, durationMs, fileCadenceMs, opts) {
const webDownloadModal = document.getElementById(modalId);
if (!webDownloadModal) return;
- opts = opts || {};
- const q = (id) => webDownloadModal.querySelector("#" + id);
+ const resolvedOpts = opts ?? {};
+ const q = (id) => webDownloadModal.querySelector(`#${id}`);
const sliderEl = q("temporalFilterSlider");
const rangeLabel = q("temporalFilterRangeLabel");
const totalFilesLabel = q("totalFilesLabel");
@@ -144,74 +122,98 @@ class DownloadActionManager {
const endDateTimeEntry = q("endDateTimeEntry");
const rangeHintEl = q("temporalRangeHint");
const sizeWarningEl = q("temporalFilterSizeWarning");
- if (!sliderEl || typeof noUiSlider === 'undefined') return;
- durationMs = Number(durationMs);
- if (!Number.isFinite(durationMs) || durationMs < 0) durationMs = 0;
- fileCadenceMs = Number(fileCadenceMs);
- if (!Number.isFinite(fileCadenceMs) || fileCadenceMs < 1) fileCadenceMs = 1000;
- const perDataFileSize = Number(opts.perDataFileSize) || 0;
- const totalSize = Number(opts.totalSize) || 0;
- const dataFilesCount = Number(opts.dataFilesCount) || 0;
- const totalFilesCount = Number(opts.totalFilesCount) || 0;
- let dataFilesTotalSize = Number(opts.dataFilesTotalSize);
+ if (!sliderEl || typeof noUiSlider === "undefined") return;
+ const resolvedDurationMs = (() => {
+ const n = Number(durationMs);
+ return !Number.isFinite(n) || n < 0 ? 0 : n;
+ })();
+ const resolvedFileCadenceMs = (() => {
+ const n = Number(fileCadenceMs);
+ return !Number.isFinite(n) || n < 1 ? 1000 : n;
+ })();
+ const perDataFileSize = Number(resolvedOpts.perDataFileSize) || 0;
+ const totalSize = Number(resolvedOpts.totalSize) || 0;
+ const dataFilesCount = Number(resolvedOpts.dataFilesCount) || 0;
+ const totalFilesCount = Number(resolvedOpts.totalFilesCount) || 0;
+ let dataFilesTotalSize = Number(resolvedOpts.dataFilesTotalSize);
if (!Number.isFinite(dataFilesTotalSize) || dataFilesTotalSize < 0) {
dataFilesTotalSize = perDataFileSize * dataFilesCount;
}
let metadataFilesTotalSize = totalSize - dataFilesTotalSize;
if (metadataFilesTotalSize < 0) metadataFilesTotalSize = 0;
const metadataFilesCount = Math.max(0, totalFilesCount - dataFilesCount);
- const captureUuid = opts.captureUuid != null ? String(opts.captureUuid) : '';
- const captureStartEpochSec = Number(opts.captureStartEpochSec);
+ const captureUuid =
+ resolvedOpts.captureUuid != null ? String(resolvedOpts.captureUuid) : "";
+ const captureStartEpochSec = Number(resolvedOpts.captureStartEpochSec);
if (totalSize > 0 && dataFilesTotalSize > totalSize) {
console.warn(
- '[DownloadActionManager] data files total size exceeds total size (backend/query inconsistency).',
- { captureUuid: captureUuid || '(unknown)', totalSize, dataFilesTotalSize, perDataFileSize, dataFilesCount }
+ "[DownloadActionManager] data files total size exceeds total size (backend/query inconsistency).",
+ {
+ captureUuid: captureUuid || "(unknown)",
+ totalSize,
+ dataFilesTotalSize,
+ perDataFileSize,
+ dataFilesCount,
+ },
);
if (sizeWarningEl) {
- sizeWarningEl.classList.remove('d-none');
+ sizeWarningEl.classList.remove("d-none");
}
dataFilesTotalSize = totalSize;
metadataFilesTotalSize = 0;
} else if (sizeWarningEl) {
- sizeWarningEl.classList.add('d-none');
+ sizeWarningEl.classList.add("d-none");
}
if (webDownloadModal) {
- webDownloadModal.dataset.durationMs = String(Math.round(durationMs));
- webDownloadModal.dataset.fileCadenceMs = String(fileCadenceMs);
- webDownloadModal.dataset.captureStartEpochSec = Number.isFinite(captureStartEpochSec) ? String(captureStartEpochSec) : '';
+ webDownloadModal.dataset.durationMs = String(
+ Math.round(resolvedDurationMs),
+ );
+ webDownloadModal.dataset.fileCadenceMs = String(resolvedFileCadenceMs);
+ webDownloadModal.dataset.captureStartEpochSec = Number.isFinite(
+ captureStartEpochSec,
+ )
+ ? String(captureStartEpochSec)
+ : "";
}
- if (rangeHintEl) rangeHintEl.textContent = '0 – ' + Math.round(durationMs) + ' ms';
+ if (rangeHintEl)
+ rangeHintEl.textContent = `0 – ${Math.round(resolvedDurationMs)} ms`;
if (sliderEl.noUiSlider) {
sliderEl.noUiSlider.destroy();
}
- if (rangeLabel) rangeLabel.textContent = '—';
- if (totalFilesLabel) totalFilesLabel.textContent = '0 files';
- if (totalSizeLabel) totalSizeLabel.textContent = window.DOMUtils.formatFileSize(totalSize);
- if (dateTimeLabel) dateTimeLabel.textContent = '—';
- if (startTimeInput) startTimeInput.value = '';
- if (endTimeInput) endTimeInput.value = '';
- if (startTimeEntry) startTimeEntry.value = '';
- if (endTimeEntry) endTimeEntry.value = '';
+ if (rangeLabel) rangeLabel.textContent = "—";
+ if (totalFilesLabel) totalFilesLabel.textContent = "0 files";
+ if (totalSizeLabel)
+ totalSizeLabel.textContent = window.DOMUtils.formatFileSize(totalSize);
+ if (dateTimeLabel) dateTimeLabel.textContent = "—";
+ if (startTimeInput) startTimeInput.value = "";
+ if (endTimeInput) endTimeInput.value = "";
+ if (startTimeEntry) startTimeEntry.value = "";
+ if (endTimeEntry) endTimeEntry.value = "";
const hasEpoch = Number.isFinite(captureStartEpochSec);
if (startDateTimeEntry) {
- startDateTimeEntry.value = '';
+ startDateTimeEntry.value = "";
startDateTimeEntry.disabled = !hasEpoch;
}
if (endDateTimeEntry) {
- endDateTimeEntry.value = '';
+ endDateTimeEntry.value = "";
endDateTimeEntry.disabled = !hasEpoch;
}
- if (durationMs <= 0) return;
+ if (resolvedDurationMs <= 0) return;
let fpStart = null;
let fpEnd = null;
const epochStart = captureStartEpochSec * 1000;
- const epochEnd = epochStart + durationMs;
- if (hasEpoch && typeof flatpickr !== 'undefined' && startDateTimeEntry && endDateTimeEntry) {
+ const epochEnd = epochStart + resolvedDurationMs;
+ if (
+ hasEpoch &&
+ typeof flatpickr !== "undefined" &&
+ startDateTimeEntry &&
+ endDateTimeEntry
+ ) {
const fpOpts = {
enableTime: true,
enableSeconds: true,
utc: true,
- dateFormat: 'Y-m-d H:i:S',
+ dateFormat: "Y-m-d H:i:S",
time_24hr: true,
minDate: epochStart,
maxDate: epochEnd,
@@ -219,135 +221,209 @@ class DownloadActionManager {
static: true,
appendTo: webDownloadModal || undefined,
};
- flatpickr(startDateTimeEntry, Object.assign({}, fpOpts, {
- onChange: function() { syncFromDateTimeEntries(); }
- }));
- flatpickr(endDateTimeEntry, Object.assign({}, fpOpts, {
- onChange: function() { syncFromDateTimeEntries(); }
- }));
+ flatpickr(
+ startDateTimeEntry,
+ Object.assign({}, fpOpts, {
+ onChange: () => {
+ syncFromDateTimeEntries();
+ },
+ }),
+ );
+ flatpickr(
+ endDateTimeEntry,
+ Object.assign({}, fpOpts, {
+ onChange: () => {
+ syncFromDateTimeEntries();
+ },
+ }),
+ );
fpStart = startDateTimeEntry._flatpickr;
fpEnd = endDateTimeEntry._flatpickr;
startDateTimeEntry.disabled = false;
endDateTimeEntry.disabled = false;
}
noUiSlider.create(sliderEl, {
- start: [0, durationMs],
+ start: [0, resolvedDurationMs],
connect: true,
- step: fileCadenceMs,
- range: { min: 0, max: durationMs },
+ step: resolvedFileCadenceMs,
+ range: { min: 0, max: resolvedDurationMs },
});
- sliderEl.noUiSlider.on('update', function(values) {
+ sliderEl.noUiSlider.on("update", (values) => {
const startMs = Number(values[0]);
const endMs = Number(values[1]);
// the + 1 is to include the first file in the selection
// as file cadence is the time between files, not the time of the file
- const filesInSelection = Math.round((endMs - startMs) / fileCadenceMs) + 1;
+ const filesInSelection =
+ Math.round((endMs - startMs) / resolvedFileCadenceMs) + 1;
if (rangeLabel) {
- rangeLabel.textContent = msToHms(startMs) + ' - ' + msToHms(endMs);
+ rangeLabel.textContent = `${msToHms(startMs)} - ${msToHms(endMs)}`;
}
if (totalFilesLabel) {
- totalFilesLabel.textContent = dataFilesCount > 0
- ? filesInSelection + ' of ' + dataFilesCount + ' files'
- : filesInSelection + ' files';
+ totalFilesLabel.textContent =
+ dataFilesCount > 0
+ ? `${filesInSelection} of ${dataFilesCount} files`
+ : `${filesInSelection} files`;
}
if (totalSizeLabel) {
totalSizeLabel.textContent = window.DOMUtils.formatFileSize(
- (perDataFileSize * filesInSelection) + metadataFilesTotalSize
+ perDataFileSize * filesInSelection + metadataFilesTotalSize,
);
}
if (dateTimeLabel && Number.isFinite(captureStartEpochSec)) {
- dateTimeLabel.textContent = formatUtcRange(captureStartEpochSec, startMs, endMs);
+ dateTimeLabel.textContent = formatUtcRange(
+ captureStartEpochSec,
+ startMs,
+ endMs,
+ );
}
if (startTimeInput) startTimeInput.value = String(Math.round(startMs));
if (endTimeInput) endTimeInput.value = String(Math.round(endMs));
if (startTimeEntry) startTimeEntry.value = String(Math.round(startMs));
if (endTimeEntry) endTimeEntry.value = String(Math.round(endMs));
if (hasEpoch) {
- if (fpStart && typeof fpStart.setDate === 'function') fpStart.setDate(epochStart + startMs);
- else if (startDateTimeEntry) startDateTimeEntry.value = msToUtcString(captureStartEpochSec, startMs);
- if (fpEnd && typeof fpEnd.setDate === 'function') fpEnd.setDate(epochStart + endMs);
- else if (endDateTimeEntry) endDateTimeEntry.value = msToUtcString(captureStartEpochSec, endMs);
+ if (fpStart && typeof fpStart.setDate === "function")
+ fpStart.setDate(epochStart + startMs);
+ else if (startDateTimeEntry)
+ startDateTimeEntry.value = msToUtcString(
+ captureStartEpochSec,
+ startMs,
+ );
+ if (fpEnd && typeof fpEnd.setDate === "function")
+ fpEnd.setDate(epochStart + endMs);
+ else if (endDateTimeEntry)
+ endDateTimeEntry.value = msToUtcString(captureStartEpochSec, endMs);
}
});
if (rangeLabel) {
- rangeLabel.textContent = '0:00:00.000 - ' + msToHms(durationMs);
+ rangeLabel.textContent = `0:00:00.000 - ${msToHms(resolvedDurationMs)}`;
}
if (totalFilesLabel) {
- totalFilesLabel.textContent = dataFilesCount > 0
- ? dataFilesCount + ' files'
- : '0 files';
+ totalFilesLabel.textContent =
+ dataFilesCount > 0 ? `${dataFilesCount} files` : "0 files";
}
if (metadataFilesLabel) {
- metadataFilesLabel.textContent = metadataFilesCount > 0
- ? metadataFilesCount + ' files'
- : '0 files';
+ metadataFilesLabel.textContent =
+ metadataFilesCount > 0 ? `${metadataFilesCount} files` : "0 files";
}
if (dateTimeLabel && Number.isFinite(captureStartEpochSec)) {
- dateTimeLabel.textContent = formatUtcRange(captureStartEpochSec, 0, durationMs);
+ dateTimeLabel.textContent = formatUtcRange(
+ captureStartEpochSec,
+ 0,
+ resolvedDurationMs,
+ );
}
- const startVal = '0';
- const endVal = String(durationMs);
+ const startVal = "0";
+ const endVal = String(resolvedDurationMs);
if (startTimeInput) startTimeInput.value = startVal;
if (endTimeInput) endTimeInput.value = endVal;
if (startTimeEntry) startTimeEntry.value = startVal;
if (endTimeEntry) endTimeEntry.value = endVal;
if (hasEpoch && startDateTimeEntry && endDateTimeEntry) {
- if (fpStart && typeof fpStart.setDate === 'function') fpStart.setDate(epochStart);
+ if (fpStart && typeof fpStart.setDate === "function")
+ fpStart.setDate(epochStart);
else startDateTimeEntry.value = msToUtcString(captureStartEpochSec, 0);
- if (fpEnd && typeof fpEnd.setDate === 'function') fpEnd.setDate(epochEnd);
- else endDateTimeEntry.value = msToUtcString(captureStartEpochSec, durationMs);
- if (!fpStart) { startDateTimeEntry.disabled = false; endDateTimeEntry.disabled = false; }
+ if (fpEnd && typeof fpEnd.setDate === "function") fpEnd.setDate(epochEnd);
+ else
+ endDateTimeEntry.value = msToUtcString(
+ captureStartEpochSec,
+ resolvedDurationMs,
+ );
+ if (!fpStart) {
+ startDateTimeEntry.disabled = false;
+ endDateTimeEntry.disabled = false;
+ }
}
function syncSliderFromEntries() {
if (!sliderEl.noUiSlider || !startTimeEntry || !endTimeEntry) return;
const s = startTimeEntry.value.trim();
const e = endTimeEntry.value.trim();
- let startMs = s === '' ? 0 : parseInt(s, 10);
- let endMs = e === '' ? durationMs : parseInt(e, 10);
+ let startMs = s === "" ? 0 : Number.parseInt(s, 10);
+ let endMs = e === "" ? resolvedDurationMs : Number.parseInt(e, 10);
if (!Number.isFinite(startMs)) startMs = 0;
- if (!Number.isFinite(endMs)) endMs = durationMs;
- startMs = Math.max(0, Math.min(startMs, durationMs));
- endMs = Math.max(0, Math.min(endMs, durationMs));
- if (startMs >= endMs) endMs = Math.min(startMs + fileCadenceMs, durationMs);
+ if (!Number.isFinite(endMs)) endMs = resolvedDurationMs;
+ startMs = Math.max(0, Math.min(startMs, resolvedDurationMs));
+ endMs = Math.max(0, Math.min(endMs, resolvedDurationMs));
+ if (startMs >= endMs)
+ endMs = Math.min(startMs + resolvedFileCadenceMs, resolvedDurationMs);
sliderEl.noUiSlider.set([startMs, endMs]);
}
function syncFromDateTimeEntries() {
- if (!hasEpoch || !sliderEl.noUiSlider || !startDateTimeEntry || !endDateTimeEntry) return;
- let startMs, endMs;
+ if (
+ !hasEpoch ||
+ !sliderEl.noUiSlider ||
+ !startDateTimeEntry ||
+ !endDateTimeEntry
+ )
+ return;
+ let startMs;
+ let endMs;
if (startDateTimeEntry._flatpickr && endDateTimeEntry._flatpickr) {
const dStart = startDateTimeEntry._flatpickr.selectedDates[0];
const dEnd = endDateTimeEntry._flatpickr.selectedDates[0];
startMs = dStart ? dStart.getTime() - epochStart : 0;
- endMs = dEnd ? dEnd.getTime() - epochStart : durationMs;
+ endMs = dEnd ? dEnd.getTime() - epochStart : resolvedDurationMs;
} else {
- startMs = parseUtcStringToEpochMs(startDateTimeEntry.value) - epochStart;
+ startMs =
+ parseUtcStringToEpochMs(startDateTimeEntry.value) - epochStart;
endMs = parseUtcStringToEpochMs(endDateTimeEntry.value) - epochStart;
}
if (Number.isNaN(startMs) || Number.isNaN(endMs)) return;
- startMs = Math.max(0, Math.min(startMs, durationMs));
- endMs = Math.max(0, Math.min(endMs, durationMs));
- if (startMs >= endMs) endMs = Math.min(startMs + fileCadenceMs, durationMs);
+ startMs = Math.max(0, Math.min(startMs, resolvedDurationMs));
+ endMs = Math.max(0, Math.min(endMs, resolvedDurationMs));
+ if (startMs >= endMs)
+ endMs = Math.min(startMs + resolvedFileCadenceMs, resolvedDurationMs);
const cur = sliderEl.noUiSlider.get();
- if (Math.round(Number(cur[0])) === Math.round(startMs) && Math.round(Number(cur[1])) === Math.round(endMs)) return;
+ if (
+ Math.round(Number(cur[0])) === Math.round(startMs) &&
+ Math.round(Number(cur[1])) === Math.round(endMs)
+ )
+ return;
sliderEl.noUiSlider.set([startMs, endMs]);
}
- if (startTimeEntry) startTimeEntry.addEventListener('change', syncSliderFromEntries);
- if (endTimeEntry) endTimeEntry.addEventListener('change', syncSliderFromEntries);
- if (startDateTimeEntry && !startDateTimeEntry._flatpickr) startDateTimeEntry.addEventListener('change', syncFromDateTimeEntries);
- if (endDateTimeEntry && !endDateTimeEntry._flatpickr) endDateTimeEntry.addEventListener('change', syncFromDateTimeEntries);
+ if (startTimeEntry)
+ startTimeEntry.addEventListener("change", syncSliderFromEntries);
+ if (endTimeEntry)
+ endTimeEntry.addEventListener("change", syncSliderFromEntries);
+ if (startDateTimeEntry && !startDateTimeEntry._flatpickr)
+ startDateTimeEntry.addEventListener("change", syncFromDateTimeEntries);
+ if (endDateTimeEntry && !endDateTimeEntry._flatpickr)
+ endDateTimeEntry.addEventListener("change", syncFromDateTimeEntries);
}
setTemporalSliderAttrs(modalId, button, itemUuid) {
// Initialize temporal slider from button data attributes (clears or builds slider)
- let durationMs = parseInt(button.getAttribute("data-length-of-capture-ms"), 10);
- let fileCadenceMs = parseInt(button.getAttribute("data-file-cadence-ms"), 10);
- let perDataFileSize = parseFloat(button.getAttribute("data-per-data-file-size"));
- let dataFilesCount = parseInt(button.getAttribute("data-data-files-count"), 10);
- let dataFilesTotalSize = parseInt(button.getAttribute("data-total-data-file-size"), 10);
- let totalSize = parseInt(button.getAttribute("data-total-size"), 10);
- let totalFilesCount = parseInt(button.getAttribute("data-total-files-count"), 10);
- let captureStartEpochSec = parseInt(button.getAttribute("data-capture-start-epoch-sec"), 10);
+ const durationMs = Number.parseInt(
+ button.getAttribute("data-length-of-capture-ms"),
+ 10,
+ );
+ const fileCadenceMs = Number.parseInt(
+ button.getAttribute("data-file-cadence-ms"),
+ 10,
+ );
+ const perDataFileSize = Number.parseFloat(
+ button.getAttribute("data-per-data-file-size"),
+ );
+ const dataFilesCount = Number.parseInt(
+ button.getAttribute("data-data-files-count"),
+ 10,
+ );
+ const dataFilesTotalSize = Number.parseInt(
+ button.getAttribute("data-total-data-file-size"),
+ 10,
+ );
+ const totalSize = Number.parseInt(
+ button.getAttribute("data-total-size"),
+ 10,
+ );
+ const totalFilesCount = Number.parseInt(
+ button.getAttribute("data-total-files-count"),
+ 10,
+ );
+ const captureStartEpochSec = Number.parseInt(
+ button.getAttribute("data-capture-start-epoch-sec"),
+ 10,
+ );
this.initializeCaptureDownloadSlider(
modalId,
Number.isNaN(durationMs) ? 0 : durationMs,
@@ -357,9 +433,13 @@ class DownloadActionManager {
totalSize: Number.isNaN(totalSize) ? 0 : totalSize,
dataFilesCount: Number.isNaN(dataFilesCount) ? 0 : dataFilesCount,
totalFilesCount: Number.isNaN(totalFilesCount) ? 0 : totalFilesCount,
- dataFilesTotalSize: Number.isNaN(dataFilesTotalSize) ? undefined : dataFilesTotalSize,
+ dataFilesTotalSize: Number.isNaN(dataFilesTotalSize)
+ ? undefined
+ : dataFilesTotalSize,
captureUuid: itemUuid || undefined,
- captureStartEpochSec: Number.isNaN(captureStartEpochSec) ? undefined : captureStartEpochSec,
+ captureStartEpochSec: Number.isNaN(captureStartEpochSec)
+ ? undefined
+ : captureStartEpochSec,
},
);
}
@@ -378,9 +458,10 @@ class DownloadActionManager {
const entryStart = startEntry.value.trim();
const entryEnd = endEntry.value.trim();
if (entryStart !== "" || entryEnd !== "") {
- const durationMs = parseInt(modalEl.dataset.durationMs, 10);
- const startMs = entryStart === "" ? 0 : parseInt(entryStart, 10);
- const endMs = entryEnd === "" ? durationMs : parseInt(entryEnd, 10);
+ const durationMs = Number.parseInt(modalEl.dataset.durationMs, 10);
+ const startMs = entryStart === "" ? 0 : Number.parseInt(entryStart, 10);
+ const endMs =
+ entryEnd === "" ? durationMs : Number.parseInt(entryEnd, 10);
if (
!Number.isFinite(startMs) ||
!Number.isFinite(endMs) ||
@@ -389,7 +470,7 @@ class DownloadActionManager {
startMs >= endMs
) {
this.showToast(
- "Please enter valid start/end times (0 ≤ start < end ≤ " + durationMs + " ms).",
+ `Please enter valid start/end times (0 ≤ start < end ≤ ${durationMs} ms).`,
"warning",
);
return;
@@ -399,9 +480,14 @@ class DownloadActionManager {
}
}
- let body = {};
+ const body = {};
let isJson = true;
- if (startTimeInput && endTimeInput && startTimeInput.value && endTimeInput.value) {
+ if (
+ startTimeInput &&
+ endTimeInput &&
+ startTimeInput.value &&
+ endTimeInput.value
+ ) {
body.start_time = startTimeInput.value;
body.end_time = endTimeInput.value;
isJson = false;
@@ -424,7 +510,9 @@ class DownloadActionManager {
}
// Handle confirm download
- const confirmBtn = document.getElementById(`confirmWebDownloadBtn-${itemUuid}`);
+ const confirmBtn = document.getElementById(
+ `confirmWebDownloadBtn-${itemUuid}`,
+ );
if (!confirmBtn) return;
// Remove any existing event listeners
@@ -446,7 +534,6 @@ class DownloadActionManager {
let body = {};
let isJson = false;
try {
-
if (itemType === "capture") {
const result = this.addTimeFilteringToFetchRequest(modalId);
body = result.body;
@@ -584,7 +671,7 @@ class DownloadActionManager {
* @param {Object} item - Item object
* @returns {boolean} Whether user can download
*/
- canDownloadItem(item) {
+ canDownloadItem(_item) {
// Check basic download permission
if (!this.permissions.canDownload()) {
return false;
@@ -611,9 +698,7 @@ class DownloadActionManager {
*/
cleanup() {
// Remove event listeners and clean up any resources
- const downloadButtons = document.querySelectorAll(
- ".web-download-btn",
- );
+ const downloadButtons = document.querySelectorAll(".web-download-btn");
for (const button of downloadButtons) {
button.removeEventListener("click", this.initializeWebDownloadButtons);
}
diff --git a/gateway/sds_gateway/static/js/components.js b/gateway/sds_gateway/static/js/components.js
index 8bb020a5c..9307335e4 100644
--- a/gateway/sds_gateway/static/js/components.js
+++ b/gateway/sds_gateway/static/js/components.js
@@ -23,7 +23,6 @@ const ComponentUtils = {
return div.innerHTML;
},
-
/**
* Formats date for display with date and time on separate lines
* @param {string} dateString - ISO date string or formatted date string
diff --git a/gateway/sds_gateway/static/js/core/DOMUtils.js b/gateway/sds_gateway/static/js/core/DOMUtils.js
index 3309538f5..8597e64c1 100644
--- a/gateway/sds_gateway/static/js/core/DOMUtils.js
+++ b/gateway/sds_gateway/static/js/core/DOMUtils.js
@@ -28,7 +28,7 @@ class DOMUtils {
v /= 1024;
i++;
}
- return (i === 0 ? v : v.toFixed(2)) + " " + units[i];
+ return `${i === 0 ? v : v.toFixed(2)} ${units[i]}`;
}
/**
@@ -209,7 +209,7 @@ class DOMUtils {
try {
bootstrapModal.dispose();
bootstrapModal = null;
- } catch (e) {
+ } catch (_e) {
// If disposal fails, force remove the instance
bootstrapModal = null;
}
diff --git a/gateway/sds_gateway/static/js/core/PageLifecycleManager.js b/gateway/sds_gateway/static/js/core/PageLifecycleManager.js
index e3fa07f1d..b8f9d8846 100644
--- a/gateway/sds_gateway/static/js/core/PageLifecycleManager.js
+++ b/gateway/sds_gateway/static/js/core/PageLifecycleManager.js
@@ -176,7 +176,6 @@ class PageLifecycleManager {
this.initializeDatasetModals();
}
-
/**
* Single DownloadActionManager for document-wide .web-download-btn / SDK buttons (not per modal).
*/
diff --git a/gateway/sds_gateway/templates/users/files.html b/gateway/sds_gateway/templates/users/files.html
index 9cc93386a..44028e2e8 100644
--- a/gateway/sds_gateway/templates/users/files.html
+++ b/gateway/sds_gateway/templates/users/files.html
@@ -179,18 +179,19 @@ Files
{% endif %}
Download
+ type="button"
+ data-item-uuid="{{ item.uuid }}"
+ data-item-type="capture"
+ data-length-of-capture-ms="{{ item.length_of_capture_ms|default:0 }}"
+ data-file-cadence-ms="{{ item.file_cadence_ms|default:1000 }}"
+ data-per-data-file-size="{{ item.data_files_info.per_data_file_size|default:0 }}"
+ data-total-data-file-size="{{ item.data_files_info.total_size|default:0 }}"
+ data-total-size="{{ item.total_file_size|default:0 }}"
+ data-data-files-count="{{ item.data_files_info.count|default:0 }}"
+ data-total-files-count="{{ item.files|length|default:0 }}"
+ data-capture-start-epoch-sec="{{ item.capture_start_epoch_sec|default:0 }}">
+ Download
+
{% elif item.type == 'file' %}
diff --git a/gateway/sds_gateway/templates/users/partials/captures_page_table.html b/gateway/sds_gateway/templates/users/partials/captures_page_table.html
index 9aa0b6a30..ea1ccb417 100644
--- a/gateway/sds_gateway/templates/users/partials/captures_page_table.html
+++ b/gateway/sds_gateway/templates/users/partials/captures_page_table.html
@@ -148,7 +148,9 @@
data-total-size="{{ cap.total_file_size|default:0 }}"
data-data-files-count="{{ cap.data_files_info.count|default:0 }}"
data-total-files-count="{{ cap.files|length|default:0 }}"
- data-capture-start-epoch-sec="{{ cap.capture_start_epoch_sec|default:0 }}">Download
+ data-capture-start-epoch-sec="{{ cap.capture_start_epoch_sec|default:0 }}">
+ Download
+
{% if cap.capture_type == 'drf' %}
diff --git a/gateway/sds_gateway/templates/users/partials/web_download_modal.html b/gateway/sds_gateway/templates/users/partials/web_download_modal.html
index 8003acd2d..b3d60d0f1 100644
--- a/gateway/sds_gateway/templates/users/partials/web_download_modal.html
+++ b/gateway/sds_gateway/templates/users/partials/web_download_modal.html
@@ -44,34 +44,64 @@
- Valid range: 0 – — ms (same values as the slider above)
-
+
+ Valid range: 0 – — ms (same values as the slider above)
+
+
Size estimates may be inaccurate (server totals inconsistent).
-
Range: 0:00:00.000 - 0:00:00.000
-
Time (UTC): —
-
Files in selection: 0 files
-
Number of metadata files to include in download: 0 files
-
Total download size (including metadata files): 0 bytes
+
+ Range: 0:00:00.000 - 0:00:00.000
+
+
+ Time (UTC): —
+
+
+ Files in selection: 0 files
+
+
+ Number of metadata files to include in download: 0 files
+
+
+ Total download size (including metadata files): 0 bytes
+
diff --git a/gateway/sds_gateway/users/tests/test_drf_views.py b/gateway/sds_gateway/users/tests/test_drf_views.py
index c3c6c8d65..41fb71c0e 100644
--- a/gateway/sds_gateway/users/tests/test_drf_views.py
+++ b/gateway/sds_gateway/users/tests/test_drf_views.py
@@ -587,7 +587,7 @@ def test_unified_download_dataset_invalid_type(
def test_unified_download_capture_with_time_filter_success(
self, client: Client, owner: User
) -> None:
- """Test capture download request with start_time/end_time passes bounds to task."""
+ """POST with start_time/end_time forwards bounds to the download task."""
capture = Capture.objects.create(
uuid=uuid.uuid4(),
name="Test DRF Capture",
@@ -601,11 +601,11 @@ def test_unified_download_capture_with_time_filter_success(
"users:download_item",
kwargs={"item_type": ItemType.CAPTURE, "item_uuid": capture.uuid},
)
- data = {"start_time": "1000", "end_time": "5000"}
+ start_ms = 1000
+ end_ms = 5000
+ data = {"start_time": str(start_ms), "end_time": str(end_ms)}
- with patch(
- "sds_gateway.users.views.send_item_files_email"
- ) as mock_send_task:
+ with patch("sds_gateway.users.views.send_item_files_email") as mock_send_task:
mock_send_task.delay.return_value = type("Result", (), {"id": "task-1"})()
response = client.post(url, data)
@@ -615,8 +615,8 @@ def test_unified_download_capture_with_time_filter_success(
assert "download request accepted" in result["message"].lower()
mock_send_task.delay.assert_called_once()
call_kwargs = mock_send_task.delay.call_args[1]
- assert call_kwargs["start_time"] == 1000
- assert call_kwargs["end_time"] == 5000
+ assert call_kwargs["start_time"] == start_ms
+ assert call_kwargs["end_time"] == end_ms
def test_unified_download_capture_without_time_filter(
self, client: Client, owner: User
@@ -636,9 +636,7 @@ def test_unified_download_capture_without_time_filter(
kwargs={"item_type": ItemType.CAPTURE, "item_uuid": capture.uuid},
)
- with patch(
- "sds_gateway.users.views.send_item_files_email"
- ) as mock_send_task:
+ with patch("sds_gateway.users.views.send_item_files_email") as mock_send_task:
mock_send_task.delay.return_value = type("Result", (), {"id": "task-1"})()
response = client.post(url)
@@ -672,4 +670,7 @@ def test_unified_download_capture_invalid_time_range(
assert response.status_code == status.HTTP_400_BAD_REQUEST
result = response.json()
assert result["success"] is False
- assert "start_time" in result["message"].lower() or "time range" in result["message"].lower()
+ assert (
+ "start_time" in result["message"].lower()
+ or "time range" in result["message"].lower()
+ )
diff --git a/gateway/sds_gateway/users/views_deprecated.py b/gateway/sds_gateway/users/views_deprecated.py
index 552fb75a4..64ba32b80 100644
--- a/gateway/sds_gateway/users/views_deprecated.py
+++ b/gateway/sds_gateway/users/views_deprecated.py
@@ -3301,30 +3301,47 @@ def _serve_file_download(self, zip_uuid: str, user) -> HttpResponse:
user_temporary_zip_download_view = TemporaryZipDownloadView.as_view()
-def _parse_optional_time(raw_value: str | None, param_name: str) -> tuple[int | None, JsonResponse | None]:
- """Parse optional start/end time. Returns (value, None) or (None, error_response)."""
+def _parse_optional_time(
+ raw_value: str | None, param_name: str
+) -> tuple[int | None, JsonResponse | None]:
+ """Parse optional start/end time.
+
+ Returns (value, None), or (None, error JsonResponse).
+ """
if raw_value in (None, ""):
return None, None
try:
value = int(raw_value)
except (TypeError, ValueError):
return None, JsonResponse(
- {"success": False, "message": f"Invalid {param_name}; it must be an integer value."},
+ {
+ "success": False,
+ "message": f"Invalid {param_name}; it must be an integer value.",
+ },
status=400,
)
if value < 0:
+ message = f"Invalid {param_name}; it must be greater than or equal to 0."
return None, JsonResponse(
- {"success": False, "message": f"Invalid {param_name}; it must be greater than or equal to 0."},
+ {
+ "success": False,
+ "message": message,
+ },
status=400,
)
return value, None
-def _validate_time_range(start_time: int | None, end_time: int | None) -> JsonResponse | None:
+def _validate_time_range(
+ start_time: int | None, end_time: int | None
+) -> JsonResponse | None:
"""Return 400 JsonResponse if both provided and start >= end; else None."""
if start_time is not None and end_time is not None and start_time >= end_time:
return JsonResponse(
- {"success": False, "message": "Invalid time range; start_time must be less than end_time."},
+ {
+ "success": False,
+ "message": "Invalid time range; start_time must be less than end_time.",
+ },
status=400,
)
return None
@@ -3344,7 +3361,7 @@ class DownloadItemView(Auth0LoginRequiredMixin, View):
ItemType.CAPTURE: Capture,
}
- def post(
+ def post( # noqa: PLR0911
self,
request: HttpRequest,
item_uuid: UUID,
From 85599c415db303ed025c5b12e4adb2998eb2dd85 Mon Sep 17 00:00:00 2001
From: klpoland
Date: Wed, 8 Apr 2026 15:35:02 -0400
Subject: [PATCH 9/9] add time filtering to new views file
---
gateway/pyproject.toml | 1 +
gateway/sds_gateway/users/views/downloads.py | 64 +++++++++++++++++++-
2 files changed, 64 insertions(+), 1 deletion(-)
diff --git a/gateway/pyproject.toml b/gateway/pyproject.toml
index 4c3f2ebf7..42663a3e9 100644
--- a/gateway/pyproject.toml
+++ b/gateway/pyproject.toml
@@ -28,6 +28,7 @@
"djangorestframework>=3.15.2",
"drf-spectacular>=0.27.2",
"environs[django]>=14.1.1",
     "factory-boy>=3.3.1",
+    "faker>=24.0.0",
"fido2>=2.1.1",
"flower>=2.0.1",
diff --git a/gateway/sds_gateway/users/views/downloads.py b/gateway/sds_gateway/users/views/downloads.py
index 063717ac8..8c15584bf 100644
--- a/gateway/sds_gateway/users/views/downloads.py
+++ b/gateway/sds_gateway/users/views/downloads.py
@@ -128,6 +128,52 @@ def _serve_file_download(self, zip_uuid: str, user) -> HttpResponse:
user_temporary_zip_download_view = TemporaryZipDownloadView.as_view()
+def _parse_optional_time(
+ raw_value: str | None, param_name: str
+) -> tuple[int | None, JsonResponse | None]:
+ """Parse optional start/end time.
+
+ Returns (value, None), or (None, error JsonResponse).
+ """
+ if raw_value in (None, ""):
+ return None, None
+ try:
+ value = int(raw_value)
+ except (TypeError, ValueError):
+ return None, JsonResponse(
+ {
+ "success": False,
+ "message": f"Invalid {param_name}; it must be an integer value.",
+ },
+ status=400,
+ )
+ if value < 0:
+ message = f"Invalid {param_name}; it must be greater than or equal to 0."
+ return None, JsonResponse(
+ {
+ "success": False,
+ "message": message,
+ },
+ status=400,
+ )
+ return value, None
+
+
+def _validate_time_range(
+ start_time: int | None, end_time: int | None
+) -> JsonResponse | None:
+ """Return 400 JsonResponse if both provided and start >= end; else None."""
+ if start_time is not None and end_time is not None and start_time >= end_time:
+ return JsonResponse(
+ {
+ "success": False,
+ "message": "Invalid time range; start_time must be less than end_time.",
+ },
+ status=400,
+ )
+ return None
+
+
class DownloadItemView(Auth0LoginRequiredMixin, View):
"""
Unified view to handle item download requests for both datasets and captures.
@@ -142,7 +188,7 @@ class DownloadItemView(Auth0LoginRequiredMixin, View):
ItemType.CAPTURE: Capture,
}
- def post(
+ def post( # noqa: PLR0911
self,
request: HttpRequest,
item_uuid: UUID,
@@ -161,6 +207,20 @@ def post(
Returns:
A JSON response containing the download status
"""
+ # Optional start and end times for temporal filtering
+ raw_start_time = request.POST.get("start_time")
+ raw_end_time = request.POST.get("end_time")
+
+ start_time, err = _parse_optional_time(raw_start_time, "start_time")
+ if err is not None:
+ return err
+ end_time, err = _parse_optional_time(raw_end_time, "end_time")
+ if err is not None:
+ return err
+ err = _validate_time_range(start_time, end_time)
+ if err is not None:
+ return err
+
# Validate item type
if item_type not in self.ITEM_MODELS:
return JsonResponse(
@@ -227,6 +287,8 @@ def post(
str(item.uuid),
str(request.user.id),
item_type,
+ start_time=start_time,
+ end_time=end_time,
)
return JsonResponse(