From 653cc892f01f809b9466e4e0910e722a1d72c4ed Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 18 Dec 2025 09:54:12 +0100 Subject: [PATCH 01/35] add product_name arg --- .../modules/db/access_layer.py | 43 ++++++++++++------- 1 file changed, 28 insertions(+), 15 deletions(-) diff --git a/services/storage/src/simcore_service_storage/modules/db/access_layer.py b/services/storage/src/simcore_service_storage/modules/db/access_layer.py index d7191be77d7b..fa64a20189dc 100644 --- a/services/storage/src/simcore_service_storage/modules/db/access_layer.py +++ b/services/storage/src/simcore_service_storage/modules/db/access_layer.py @@ -1,4 +1,4 @@ -"""Helper functions to determin access-rights on stored data +"""Helper functions to determine access-rights on stored data # DRAFT Rationale: @@ -40,6 +40,7 @@ import sqlalchemy as sa from models_library.groups import GroupID +from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import StorageFileID from models_library.users import UserID @@ -144,26 +145,29 @@ def my_shared_workspace_access_rights_subquery(user_group_ids: list[GroupID]): async def _list_user_projects_access_rights_with_read_access( - connection: AsyncConnection, user_id: UserID + conn: AsyncConnection, + user_id: UserID, + product_name: ProductName, ) -> list[ProjectID]: """ Returns access-rights of user (user_id) over all OWNED or SHARED projects """ - user_group_ids: list[GroupID] = await _get_user_groups_ids(connection, user_id) - _my_access_rights_subquery = my_private_workspace_access_rights_subquery( + user_group_ids: list[GroupID] = await _get_user_groups_ids(conn, user_id) + my_access_rights_subquery = my_private_workspace_access_rights_subquery( user_group_ids ) private_workspace_query = ( - sa.select( - projects.c.uuid, + sa.select(projects.c.uuid) + .select_from(projects.join(my_access_rights_subquery)) + .where( + (projects.c.workspace_id.is_(None)) + & (projects.c.product_name == f"{product_name}") ) - .select_from(projects.join(_my_access_rights_subquery)) - .where(projects.c.workspace_id.is_(None)) ) - _my_workspace_access_rights_subquery = my_shared_workspace_access_rights_subquery( + my_workspace_access_rights_subquery = my_shared_workspace_access_rights_subquery( user_group_ids ) @@ -171,19 +175,22 @@ async def _list_user_projects_access_rights_with_read_access( sa.select(projects.c.uuid) .select_from( projects.join( - _my_workspace_access_rights_subquery, + my_workspace_access_rights_subquery, projects.c.workspace_id - == _my_workspace_access_rights_subquery.c.workspace_id, + == my_workspace_access_rights_subquery.c.workspace_id, ) ) - .where(projects.c.workspace_id.is_not(None)) + .where( + (projects.c.workspace_id.is_not(None)) + & (projects.c.product_name == f"{product_name}") + ) ) combined_query = sa.union_all(private_workspace_query, shared_workspace_query) projects_access_rights = [] - async for row in await connection.stream(combined_query): + async for row in await conn.stream(combined_query): assert isinstance(row.uuid, str) # nosec projects_access_rights.append(ProjectID(row.uuid)) @@ -375,10 +382,16 @@ async def get_file_access_rights( ) async def get_readable_project_ids( - self, *, connection: AsyncConnection | None = None, user_id: UserID + self, + *, + connection: AsyncConnection | None = None, + user_id: UserID, + product_name: ProductName, ) -> list[ProjectID]: """Returns a list of projects where user has granted read-access""" async with 
pass_or_acquire_connection(self.db_engine, connection) as conn: return await _list_user_projects_access_rights_with_read_access( - conn, user_id + conn, + user_id, + product_name, ) From ea165052f51e4dde7e158431caa4e29e14cfbc67 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 18 Dec 2025 10:25:35 +0100 Subject: [PATCH 02/35] fix missing arg --- .../simcore_service_storage/datcore_dsm.py | 40 ++++++++-- .../simcore_service_storage/dsm_factory.py | 18 ++++- .../simcore_service_storage/simcore_s3_dsm.py | 73 ++++++++++++++----- .../utils/simcore_s3_dsm_utils.py | 19 +++-- 4 files changed, 118 insertions(+), 32 deletions(-) diff --git a/services/storage/src/simcore_service_storage/datcore_dsm.py b/services/storage/src/simcore_service_storage/datcore_dsm.py index fef1aa992d66..67e09598e619 100644 --- a/services/storage/src/simcore_service_storage/datcore_dsm.py +++ b/services/storage/src/simcore_service_storage/datcore_dsm.py @@ -13,6 +13,7 @@ UploadedPart, ) from models_library.basic_types import SHA256Str +from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import LocationID, LocationName, StorageFileID from models_library.users import UserID @@ -81,14 +82,25 @@ async def authorized(self, user_id: UserID) -> bool: ) return False - async def list_datasets(self, user_id: UserID) -> list[DatasetMetaData]: + async def list_datasets( + self, user_id: UserID, product_name: ProductName + ) -> list[DatasetMetaData]: + _ = product_name + api_token, api_secret = await self._get_datcore_tokens(user_id) api_token, api_secret = _check_api_credentials(api_token, api_secret) return await datcore_adapter.list_all_datasets(self.app, api_token, api_secret) async def list_files_in_dataset( - self, user_id: UserID, dataset_id: str, *, expand_dirs: bool + self, + user_id: UserID, + product_name: ProductName, + dataset_id: str, + *, + expand_dirs: bool, ) -> list[FileMetaData]: + _ = product_name, expand_dirs + api_token, api_secret = await self._get_datcore_tokens(user_id) api_token, api_secret = _check_api_credentials(api_token, api_secret) return await datcore_adapter.list_all_files_metadatas_in_dataset( @@ -98,12 +110,15 @@ async def list_files_in_dataset( async def list_paths( self, user_id: UserID, + product_name: ProductName, *, file_filter: Path | None, cursor: GenericCursor | None, limit: NonNegativeInt, ) -> tuple[list[PathMetaData], GenericCursor | None, TotalNumber | None]: """returns a page of the file meta data a user has access to""" + _ = product_name + api_token, api_secret = await self._get_datcore_tokens(user_id) api_token, api_secret = _check_api_credentials(api_token, api_secret) if not file_filter: @@ -191,8 +206,12 @@ async def list_paths( 1, ) - async def compute_path_size(self, user_id: UserID, *, path: Path) -> ByteSize: + async def compute_path_size( + self, user_id: UserID, product_name: ProductName, *, path: Path + ) -> ByteSize: """returns the total size of an arbitrary path""" + _ = product_name + api_token, api_secret = await self._get_datcore_tokens(user_id) api_token, api_secret = _check_api_credentials(api_token, api_secret) @@ -216,7 +235,11 @@ async def compute_path_size(self, user_id: UserID, *, path: Path) -> ByteSize: while paths_to_process: current_path = paths_to_process.pop() paths, cursor, _ = await self.list_paths( - user_id, file_filter=current_path, cursor=None, limit=50 + user_id, + product_name=product_name, + file_filter=current_path, + cursor=None, + limit=50, ) while paths: 
@@ -237,7 +260,11 @@ async def compute_path_size(self, user_id: UserID, *, path: Path) -> ByteSize: if cursor: paths, cursor, _ = await self.list_paths( - user_id, file_filter=current_path, cursor=cursor, limit=50 + user_id, + product_name=product_name, + file_filter=current_path, + cursor=cursor, + limit=50, ) else: break @@ -251,11 +278,14 @@ async def compute_path_size(self, user_id: UserID, *, path: Path) -> ByteSize: async def list_files( self, user_id: UserID, + product_name: ProductName, *, expand_dirs: bool, uuid_filter: str, project_id: ProjectID | None, ) -> list[FileMetaData]: + _ = product_name, expand_dirs, uuid_filter, project_id + api_token, api_secret = await self._get_datcore_tokens(user_id) api_token, api_secret = _check_api_credentials(api_token, api_secret) return await datcore_adapter.list_all_datasets_files_metadatas( diff --git a/services/storage/src/simcore_service_storage/dsm_factory.py b/services/storage/src/simcore_service_storage/dsm_factory.py index 749bbf9a5e61..6560b4f56ad1 100644 --- a/services/storage/src/simcore_service_storage/dsm_factory.py +++ b/services/storage/src/simcore_service_storage/dsm_factory.py @@ -6,6 +6,7 @@ from fastapi import FastAPI from models_library.api_schemas_storage.storage_schemas import LinkType, UploadedPart from models_library.basic_types import SHA256Str +from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import LocationID, LocationName, StorageFileID from models_library.users import UserID @@ -47,12 +48,19 @@ async def authorized(self, user_id: UserID) -> bool: """returns True if user with user_id is authorized to access the storage""" @abstractmethod - async def list_datasets(self, user_id: UserID) -> list[DatasetMetaData]: + async def list_datasets( + self, user_id: UserID, product_name: ProductName + ) -> list[DatasetMetaData]: """returns all the top level datasets a user has access to""" @abstractmethod async def list_files_in_dataset( - self, user_id: UserID, dataset_id: str, *, expand_dirs: bool + self, + user_id: UserID, + product_name: ProductName, + dataset_id: str, + *, + expand_dirs: bool, ) -> list[FileMetaData]: """returns all the file meta data inside dataset with dataset_id""" # NOTE: expand_dirs will be replaced by pagination in the future @@ -61,6 +69,7 @@ async def list_files_in_dataset( async def list_files( self, user_id: UserID, + product_name: ProductName, *, expand_dirs: bool, uuid_filter: str, @@ -73,6 +82,7 @@ async def list_files( async def list_paths( self, user_id: UserID, + product_name: ProductName, *, file_filter: Path | None, cursor: GenericCursor | None, @@ -81,7 +91,9 @@ async def list_paths( """returns a page of the file meta data a user has access to""" @abstractmethod - async def compute_path_size(self, user_id: UserID, *, path: Path) -> ByteSize: + async def compute_path_size( + self, user_id: UserID, product_name: ProductName, *, path: Path + ) -> ByteSize: """returns the total size of an arbitrary path""" @abstractmethod diff --git a/services/storage/src/simcore_service_storage/simcore_s3_dsm.py b/services/storage/src/simcore_service_storage/simcore_s3_dsm.py index 1a2751e53ecb..d8176b6387b8 100644 --- a/services/storage/src/simcore_service_storage/simcore_s3_dsm.py +++ b/services/storage/src/simcore_service_storage/simcore_s3_dsm.py @@ -29,6 +29,7 @@ UploadedPart, ) from models_library.basic_types import SHA256Str +from models_library.products import ProductName from models_library.projects import ProjectID 
from models_library.projects_nodes_io import ( LocationID, @@ -115,7 +116,7 @@ async def _add_frontend_needed_data( project_ids: list[ProjectID], data: list[FileMetaData], ) -> list[FileMetaData]: - # artifically fills ['project_name', 'node_name', 'file_id', 'raw_file_path', 'display_file_path'] + # artificially fills ['project_name', 'node_name', 'file_id', 'raw_file_path', 'display_file_path'] # with information from the projects table! # NOTE: This part with the projects, should be done in the client code not here! @@ -160,10 +161,15 @@ def get_location_name(cls) -> str: async def authorized(self, _user_id: UserID) -> bool: return True # always true for now - async def list_datasets(self, user_id: UserID) -> list[DatasetMetaData]: + async def list_datasets( + self, user_id: UserID, product_name: ProductName + ) -> list[DatasetMetaData]: readable_projects_ids = await AccessLayerRepository.instance( get_db_engine(self.app) - ).get_readable_project_ids(user_id=user_id) + ).get_readable_project_ids( + user_id=user_id, + product_name=product_name, + ) return [ DatasetMetaData( @@ -176,11 +182,17 @@ async def list_datasets(self, user_id: UserID) -> list[DatasetMetaData]: ] async def list_files_in_dataset( - self, user_id: UserID, dataset_id: str, *, expand_dirs: bool + self, + user_id: UserID, + product_name: ProductName, + dataset_id: str, + *, + expand_dirs: bool, ) -> list[FileMetaData]: # NOTE: expand_dirs will be replaced by pagination in the future data: list[FileMetaData] = await self.list_files( user_id, + product_name, expand_dirs=expand_dirs, uuid_filter=ensure_ends_with(dataset_id, "/"), project_id=None, @@ -190,6 +202,7 @@ async def list_files_in_dataset( async def list_paths( self, user_id: UserID, + product_name: ProductName, *, file_filter: Path | None, cursor: GenericCursor | None, @@ -206,7 +219,10 @@ async def list_paths( project_id = ProjectID(file_filter.parts[0]) if file_filter else None accessible_projects_ids = await get_accessible_project_ids( - get_db_engine(self.app), user_id=user_id, project_id=project_id + get_db_engine(self.app), + user_id=user_id, + product_name=product_name, + project_id=project_id, ) # check if the file_filter is a directory or inside one @@ -258,7 +274,13 @@ async def list_paths( return paths_metadata, next_cursor, total - async def compute_path_size(self, user_id: UserID, *, path: Path) -> ByteSize: + async def compute_path_size( + self, + user_id: UserID, + product_name: ProductName, + *, + path: Path, + ) -> ByteSize: """returns the total size of an arbitrary path""" # check access rights first project_id = None @@ -267,7 +289,10 @@ async def compute_path_size(self, user_id: UserID, *, path: Path) -> ByteSize: project_id = ProjectID(path.parts[0]) accessible_projects_ids = await get_accessible_project_ids( - get_db_engine(self.app), user_id=user_id, project_id=project_id + get_db_engine(self.app), + user_id=user_id, + product_name=product_name, + project_id=project_id, ) # use-cases: @@ -326,6 +351,7 @@ async def compute_path_size(self, user_id: UserID, *, path: Path) -> ByteSize: async def list_files( self, user_id: UserID, + product_name: ProductName, *, expand_dirs: bool, uuid_filter: str, @@ -358,7 +384,8 @@ async def list_files( uid = None else: accessible_projects_ids = await access_layer_repo.get_readable_project_ids( - user_id=user_id + user_id=user_id, + product_name=product_name, ) uid = user_id file_and_directory_meta_data = await FileMetaDataRepository.instance( @@ -375,7 +402,7 @@ async def list_files( # add all the entries 
from file_meta_data without for metadata in file_and_directory_meta_data: - # below checks ensures that directoris either appear as + # below checks ensures that directories either appear as if metadata.is_directory and expand_dirs: # avoids directory files and does not add any directory entry to the result continue @@ -853,7 +880,7 @@ async def deep_copy_project_simcore_s3( task_progress, src_project_total_data_size, task_progress_message_prefix=f"Copying {total_num_of_files} files to '{dst_project['name']}'", - ) as s3_transfered_data_cb: + ) as s3_transferred_data_cb: with log_context( _logger, logging.INFO, @@ -879,7 +906,7 @@ async def deep_copy_project_simcore_s3( ).validate_python( f"{dst_project_uuid}/{new_node_id}/{src_fmd.object_name.split('/', maxsplit=2)[-1]}" ), - bytes_transfered_cb=s3_transfered_data_cb.copy_transfer_cb, + bytes_transferred_cb=s3_transferred_data_cb.copy_transfer_cb, ) ) with log_context( @@ -897,7 +924,7 @@ async def deep_copy_project_simcore_s3( dest_project_id=dst_project_uuid, dest_node_id=NodeID(node_id), file_storage_link=output, - bytes_transfered_cb=s3_transfered_data_cb.upload_transfer_cb, + bytes_transferred_cb=s3_transferred_data_cb.upload_transfer_cb, ) for output in node.get("outputs", {}).values() if isinstance(output, dict) @@ -1152,6 +1179,7 @@ async def _search_project_s3_files( async def search( self, user_id: UserID, + product_name: ProductName, *, name_pattern: str, project_id: ProjectID | None = None, @@ -1178,7 +1206,10 @@ async def search( """ # Validate access rights accessible_projects_ids = await get_accessible_project_ids( - get_db_engine(self.app), user_id=user_id, project_id=project_id + get_db_engine(self.app), + user_id=user_id, + product_name=product_name, + project_id=project_id, ) # Collect all results across projects @@ -1394,7 +1425,7 @@ async def _copy_file_datcore_s3( dest_project_id: ProjectID, dest_node_id: NodeID, file_storage_link: dict[str, Any], - bytes_transfered_cb: UploadedBytesTransferredCallback, + bytes_transferred_cb: UploadedBytesTransferredCallback, ) -> FileMetaData: session = get_client_session(self.app) # 2 steps: Get download link for local copy, then upload to S3 @@ -1431,7 +1462,7 @@ async def _copy_file_datcore_s3( bucket=self.simcore_bucket_name, file=local_file_path, object_key=dst_file_id, - bytes_transfered_cb=bytes_transfered_cb, + bytes_transferred_cb=bytes_transferred_cb, ) updated_fmd = await self._update_database_from_storage(fmd=new_fmd) file_storage_link["store"] = self.location_id @@ -1447,7 +1478,7 @@ async def _copy_path_s3_s3( *, src_fmd: FileMetaDataAtDB, dst_file_id: SimcoreS3FileID, - bytes_transfered_cb: CopiedBytesTransferredCallback, + bytes_transferred_cb: CopiedBytesTransferredCallback, ) -> FileMetaData: with log_context( _logger, @@ -1471,14 +1502,14 @@ async def _copy_path_s3_s3( bucket=self.simcore_bucket_name, src_prefix=src_fmd.object_name, dst_prefix=new_fmd.object_name, - bytes_transfered_cb=bytes_transfered_cb, + bytes_transferred_cb=bytes_transferred_cb, ) else: await s3_client.copy_object( bucket=self.simcore_bucket_name, src_object_key=src_fmd.object_name, dst_object_key=new_fmd.object_name, - bytes_transfered_cb=bytes_transfered_cb, + bytes_transferred_cb=bytes_transferred_cb, ) # we are done, let's update the copy with the src updated_fmd = await self._update_fmd_from_other( @@ -1520,6 +1551,7 @@ async def _create_fmd_for_upload( async def create_s3_export( self, user_id: UserID, + product_name: ProductName, object_keys: list[S3ObjectKey], *, progress_bar: 
ProgressBarData, @@ -1538,7 +1570,10 @@ async def create_s3_export( try: accessible_projects_ids = await get_accessible_project_ids( - get_db_engine(self.app), user_id=user_id, project_id=project_id + get_db_engine(self.app), + user_id=user_id, + product_name=product_name, + project_id=project_id, ) except ProjectAccessRightError as err: raise AccessRightError( diff --git a/services/storage/src/simcore_service_storage/utils/simcore_s3_dsm_utils.py b/services/storage/src/simcore_service_storage/utils/simcore_s3_dsm_utils.py index a07d9945165f..f0004fc41a58 100644 --- a/services/storage/src/simcore_service_storage/utils/simcore_s3_dsm_utils.py +++ b/services/storage/src/simcore_service_storage/utils/simcore_s3_dsm_utils.py @@ -9,6 +9,7 @@ from aws_library.s3._models import S3ObjectKey from common_library.json_serialization import json_dumps, json_loads from models_library.api_schemas_storage.storage_schemas import S3BucketName +from models_library.products import ProductName from models_library.projects import ProjectID, ProjectIDStr from models_library.projects_nodes_io import ( NodeIDStr, @@ -181,9 +182,9 @@ def _base_path_parent(base_path: UserSelectionStr, s3_object: S3ObjectKey) -> st return f"{result}" -def _get_project_ids(user_selecton: set[UserSelectionStr]) -> list[ProjectID]: +def _get_project_ids(user_selection: set[UserSelectionStr]) -> list[ProjectID]: results = [] - for selected in user_selecton: + for selected in user_selection: project_id = ProjectID(Path(selected).parts[0]) results.append(project_id) return results @@ -221,7 +222,9 @@ async def create_and_upload_export( progress_bar: ProgressBarData, ) -> None: ids_names_map = await project_repository.get_project_id_and_node_id_to_names_map( - project_uuids=_get_project_ids(user_selecton={x[0] for x in source_object_keys}) + project_uuids=_get_project_ids( + user_selection={x[0] for x in source_object_keys} + ) ) archive_entries: ArchiveEntries = [ @@ -340,7 +343,11 @@ async def list_child_paths_from_repository( async def get_accessible_project_ids( - db_engine: AsyncEngine, *, user_id: UserID, project_id: ProjectID | None + db_engine: AsyncEngine, + *, + user_id: UserID, + product_name: ProductName, + project_id: ProjectID | None, ) -> list[ProjectID]: access_layer_repo = AccessLayerRepository.instance(db_engine) if project_id: @@ -350,4 +357,6 @@ async def get_accessible_project_ids( if not project_access_rights.read: raise ProjectAccessRightError(access_right="read", project_id=project_id) return [project_id] - return await access_layer_repo.get_readable_project_ids(user_id=user_id) + return await access_layer_repo.get_readable_project_ids( + user_id=user_id, product_name=product_name + ) From ddd55905c68694adfa53c1e329226b80212aec0a Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 18 Dec 2025 10:31:05 +0100 Subject: [PATCH 03/35] fix rename --- .../aws-library/src/aws_library/s3/_client.py | 22 ++++++------ packages/aws-library/tests/test_s3_client.py | 36 +++++++++---------- services/storage/tests/conftest.py | 6 ++-- .../storage/tests/unit/test_handlers_files.py | 2 +- .../storage/tests/unit/test_simcore_s3_dsm.py | 4 +-- 5 files changed, 35 insertions(+), 35 deletions(-) diff --git a/packages/aws-library/src/aws_library/s3/_client.py b/packages/aws-library/src/aws_library/s3/_client.py index e0f37cf271ff..eb85604802f1 100644 --- a/packages/aws-library/src/aws_library/s3/_client.py +++ b/packages/aws-library/src/aws_library/s3/_client.py @@ -325,7 +325,7 @@ async def list_entries_paginated( prefix: str, *, 
items_per_page: int = _MAX_ITEMS_PER_PAGE, - ) -> AsyncGenerator[list[S3MetaData | S3DirectoryMetaData], None]: + ) -> AsyncGenerator[list[S3MetaData | S3DirectoryMetaData]]: """Breadth-first recursive listing of S3 entries (files + directories). Yields: @@ -564,7 +564,7 @@ async def upload_file( bucket: S3BucketName, file: Path, object_key: S3ObjectKey, - bytes_transfered_cb: UploadedBytesTransferredCallback | None, + bytes_transferred_cb: UploadedBytesTransferredCallback | None, ) -> None: """upload a file using aioboto3 transfer manager (e.g. works >5Gb and creates multiple threads)""" upload_options: dict[str, Any] = { @@ -572,10 +572,10 @@ async def upload_file( "Key": object_key, "Config": TransferConfig(max_concurrency=self.transfer_max_concurrency), } - if bytes_transfered_cb: + if bytes_transferred_cb: upload_options |= { "Callback": functools.partial( - bytes_transfered_cb, file_name=f"{object_key}" + bytes_transferred_cb, file_name=f"{object_key}" ) } await self._client.upload_file(f"{file}", **upload_options) @@ -587,7 +587,7 @@ async def copy_object( bucket: S3BucketName, src_object_key: S3ObjectKey, dst_object_key: S3ObjectKey, - bytes_transfered_cb: CopiedBytesTransferredCallback | None, + bytes_transferred_cb: CopiedBytesTransferredCallback | None, object_metadata: S3MetaData | None = None, ) -> None: """copy a file in S3 using aioboto3 transfer manager (e.g. works >5Gb and creates multiple threads)""" @@ -600,22 +600,22 @@ async def copy_object( multipart_threshold=MULTIPART_COPY_THRESHOLD, ), } - if bytes_transfered_cb: + if bytes_transferred_cb: copy_options |= { "Callback": functools.partial( - bytes_transfered_cb, file_name=f"{dst_object_key}" + bytes_transferred_cb, file_name=f"{dst_object_key}" ) } # NOTE: boto3 copy function uses copy_object until 'multipart_threshold' is reached then switches to multipart copy # copy_object does not provide any callbacks so we can't track progress so we need to ensure at least the completion # of the object is tracked await self._client.copy(**copy_options) - if bytes_transfered_cb: + if bytes_transferred_cb: if object_metadata is None: object_metadata = await self.get_object_metadata( bucket=bucket, object_key=dst_object_key ) - bytes_transfered_cb(object_metadata.size, file_name=f"{dst_object_key}") + bytes_transferred_cb(object_metadata.size, file_name=f"{dst_object_key}") @s3_exception_handler(_logger) async def copy_objects_recursively( @@ -624,7 +624,7 @@ async def copy_objects_recursively( bucket: S3BucketName, src_prefix: str, dst_prefix: str, - bytes_transfered_cb: CopiedBytesTransferredCallback | None, + bytes_transferred_cb: CopiedBytesTransferredCallback | None, ) -> None: """copy from 1 location in S3 to another recreating the same structure""" dst_metadata = await self.get_directory_metadata( @@ -638,7 +638,7 @@ async def copy_objects_recursively( bucket=bucket, src_object_key=s3_object.object_key, dst_object_key=s3_object.object_key.replace(src_prefix, dst_prefix), - bytes_transfered_cb=bytes_transfered_cb, + bytes_transferred_cb=bytes_transferred_cb, object_metadata=s3_object, ) async for s3_object in self._list_all_objects( diff --git a/packages/aws-library/tests/test_s3_client.py b/packages/aws-library/tests/test_s3_client.py index a3b2a379d59a..e5ae2de4c2dc 100644 --- a/packages/aws-library/tests/test_s3_client.py +++ b/packages/aws-library/tests/test_s3_client.py @@ -289,14 +289,14 @@ class _UploadProgressCallback: file_size: int action: str logger: logging.Logger - _total_bytes_transfered: int = 0 + 
_total_bytes_transferred: int = 0 def __call__(self, bytes_transferred: int, *, file_name: str) -> None: - self._total_bytes_transfered += bytes_transferred - assert self._total_bytes_transfered <= self.file_size + self._total_bytes_transferred += bytes_transferred + assert self._total_bytes_transferred <= self.file_size self.logger.info( "progress: %s", - f"{self.action} {file_name=} {self._total_bytes_transfered} / {self.file_size} bytes", + f"{self.action} {file_name=} {self._total_bytes_transferred} / {self.file_size} bytes", ) @@ -305,14 +305,14 @@ class _CopyProgressCallback: file_size: int action: str logger: logging.Logger - _total_bytes_transfered: int = 0 + _total_bytes_transferred: int = 0 def __call__(self, total_bytes_copied: int, *, file_name: str) -> None: - self._total_bytes_transfered = total_bytes_copied - assert self._total_bytes_transfered <= self.file_size + self._total_bytes_transferred = total_bytes_copied + assert self._total_bytes_transferred <= self.file_size self.logger.info( "progress: %s", - f"{self.action} {file_name=} {self._total_bytes_transfered} / {self.file_size} bytes", + f"{self.action} {file_name=} {self._total_bytes_transferred} / {self.file_size} bytes", ) @@ -339,7 +339,7 @@ async def _uploader(file: Path, base_path: Path | None = None) -> UploadedFile: bucket=with_s3_bucket, file=file, object_key=object_key, - bytes_transfered_cb=progress_cb, + bytes_transferred_cb=progress_cb, ) # there is no response from aioboto3... assert not response @@ -432,7 +432,7 @@ async def _copier(src_key: S3ObjectKey, dst_key: S3ObjectKey) -> S3ObjectKey: bucket=with_s3_bucket, src_object_key=src_key, dst_object_key=dst_key, - bytes_transfered_cb=progress_cb, + bytes_transferred_cb=progress_cb, ) copied_object_keys.append(dst_key) return dst_key @@ -467,7 +467,7 @@ async def _copier(src_prefix: str, dst_prefix: str) -> str: bucket=with_s3_bucket, src_prefix=src_prefix, dst_prefix=dst_prefix, - bytes_transfered_cb=progress_cb, + bytes_transferred_cb=progress_cb, ) dst_directory_metadata = await simcore_s3_api.get_directory_metadata( @@ -1599,7 +1599,7 @@ async def test_upload_file_invalid_raises( bucket=non_existing_s3_bucket, file=file, object_key=faker.pystr(), - bytes_transfered_cb=None, + bytes_transferred_cb=None, ) @@ -1654,7 +1654,7 @@ async def test_copy_file_invalid_raises( bucket=non_existing_s3_bucket, src_object_key=uploaded_file.s3_key, dst_object_key=dst_object_key, - bytes_transfered_cb=None, + bytes_transferred_cb=None, ) fake_src_key = faker.file_name() with pytest.raises(S3KeyNotFoundError, match=rf"{fake_src_key}"): @@ -1662,7 +1662,7 @@ async def test_copy_file_invalid_raises( bucket=with_s3_bucket, src_object_key=fake_src_key, dst_object_key=dst_object_key, - bytes_transfered_cb=None, + bytes_transferred_cb=None, ) @@ -1838,7 +1838,7 @@ async def test_copy_files_recursively_raises( bucket=non_existing_s3_bucket, src_prefix="", dst_prefix="", - bytes_transfered_cb=None, + bytes_transferred_cb=None, ) @@ -1924,7 +1924,7 @@ def run_async_test(*args, **kwargs) -> None: ], ids=byte_size_ids, ) -def test_copy_recurively_performance( +def test_copy_recursively_performance( mocked_s3_server_envs: EnvVarsDict, with_uploaded_folder_on_s3: list[UploadedFile], copy_files_recursively: Callable[[str, str], Awaitable[str]], @@ -2070,7 +2070,7 @@ async def path_s3_files_for_archive( @pytest.fixture def archive_download_path(tmp_path: Path, faker: Faker) -> Iterator[Path]: - path = tmp_path / f"downlaoded_ardhive_{faker.uuid4()}.zip" + path = tmp_path / 
f"downloaded_ardhive_{faker.uuid4()}.zip" yield path if path.exists(): path.unlink() @@ -2119,7 +2119,7 @@ async def test_workflow_compress_s3_objects_and_local_files_in_a_single_archive_ # - files are read form disk and S3 # - a zip archive is created on the go # - the zip archive is streamed to S3 as soon as chunks inside it are created - # Uses no disk and constant memory for the entire opration. + # Uses no disk and constant memory for the entire operation. # 1. assemble and upload zip archive diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index 5c02fa3c4a42..3e96238bf5c4 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -572,7 +572,7 @@ async def _upload_file_to_s3( bucket=s3_bucket, file=local_file, object_key=file_id, - bytes_transfered_cb=None, + bytes_transferred_cb=None, ) return {file_id: FileIDDict(path=local_file, sha256_checksum=f"{faker.sha256()}")} @@ -604,7 +604,7 @@ async def _create_content( s3_base_path = Path(f"{project_id}") / f"{node_id}" / dir_name # NOTE: add a space in the sub directory s3_subdirs = [ - s3_base_path / f"sub-dir_ect ory-{i}" for i in range(subdir_count) + s3_base_path / f"sub-dir_etc ory-{i}" for i in range(subdir_count) ] # Randomly distribute files across subdirectories selected_subdirs = random.choices(s3_subdirs, k=file_count) # noqa: S311 @@ -1028,7 +1028,7 @@ async def with_storage_celery_worker( monkeypatch: pytest.MonkeyPatch, register_celery_tasks: Callable[[Celery], None], ) -> AsyncIterator[TestWorkController]: - # Signals must be explicitily connected + # Signals must be explicitly connected tracing_config = TracingConfig.create( tracing_settings=None, # disable tracing in tests service_name="storage-api", diff --git a/services/storage/tests/unit/test_handlers_files.py b/services/storage/tests/unit/test_handlers_files.py index f07b63cdbe92..85a11deb9659 100644 --- a/services/storage/tests/unit/test_handlers_files.py +++ b/services/storage/tests/unit/test_handlers_files.py @@ -994,7 +994,7 @@ async def test_download_file_from_inside_a_directory( bucket=storage_s3_bucket, file=file_to_upload_in_dir, object_key=s3_file_id, - bytes_transfered_cb=None, + bytes_transferred_cb=None, ) assert ( await storage_s3_client.object_exists( diff --git a/services/storage/tests/unit/test_simcore_s3_dsm.py b/services/storage/tests/unit/test_simcore_s3_dsm.py index 0b8583d5c5e6..389bad89e34e 100644 --- a/services/storage/tests/unit/test_simcore_s3_dsm.py +++ b/services/storage/tests/unit/test_simcore_s3_dsm.py @@ -103,7 +103,7 @@ async def _copy_s3_path(s3_file_id_to_copy: SimcoreS3FileID) -> None: user_id=user_id, src_fmd=existing_fmd, dst_file_id=_get_dest_file_id(s3_file_id_to_copy), - bytes_transfered_cb=mock_copy_transfer_cb, + bytes_transferred_cb=mock_copy_transfer_cb, ) async def _count_files(s3_file_id: SimcoreS3FileID, expected_count: int) -> None: @@ -483,7 +483,7 @@ async def _progress_cb(report: ProgressReport) -> None: user_id, selection_to_export, progress_bar=root_progress_bar ) cleanup_files_closure(file_id) - # count=2 -> the direcotory and the .zip export + # count=2 -> the directory and the .zip export await _assert_meta_data_entries_count( sqlalchemy_async_engine, count=initial_fmd_count + 1 ) From 52464ad758999c203cccd612a9b145f880d870bb Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 18 Dec 2025 10:40:04 +0100 Subject: [PATCH 04/35] fix missing arg --- .../api/_worker_tasks/_paths.py | 10 ++++++++-- .../api/_worker_tasks/_simcore_s3.py | 18 
+++++++++++++++--- .../api/rest/_datasets.py | 3 ++- .../simcore_service_storage/api/rest/_files.py | 3 ++- .../simcore_service_storage/api/rest/_paths.py | 11 ++++++++--- .../src/simcore_service_storage/models.py | 2 ++ 6 files changed, 37 insertions(+), 10 deletions(-) diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py index 0401d2400c89..1b7a05e2dca7 100644 --- a/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py @@ -3,6 +3,7 @@ from celery import Task # type: ignore[import-untyped] from celery_library.worker.app_server import get_app_server +from models_library.products import ProductName from models_library.projects_nodes_io import LocationID, StorageFileID from models_library.users import UserID from pydantic import ByteSize, TypeAdapter @@ -17,7 +18,12 @@ async def compute_path_size( - task: Task, task_key: TaskKey, user_id: UserID, location_id: LocationID, path: Path + task: Task, + task_key: TaskKey, + user_id: UserID, + product_name: ProductName, + location_id: LocationID, + path: Path, ) -> ByteSize: assert task_key # nosec with log_context( @@ -26,7 +32,7 @@ async def compute_path_size( msg=f"computing path size {user_id=}, {location_id=}, {path=}", ): dsm = get_dsm_provider(get_app_server(task.app).app).get(location_id) - return await dsm.compute_path_size(user_id, path=Path(path)) + return await dsm.compute_path_size(user_id, product_name, path=Path(path)) async def delete_paths( diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py index 9fe42b3e476f..15ad05db3c24 100644 --- a/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py @@ -14,6 +14,7 @@ PresignedLink, ) from models_library.api_schemas_webserver.storage import PathToExport +from models_library.products import ProductName from models_library.progress_bar import ProgressReport from models_library.projects import ProjectID from models_library.projects_nodes_io import StorageFileID @@ -76,6 +77,7 @@ async def export_data( task_key: TaskKey, *, user_id: UserID, + product_name: ProductName, paths_to_export: list[PathToExport], ) -> StorageFileID: """ @@ -112,7 +114,10 @@ async def _progress_cb(report: ProgressReport) -> None: progress_report_cb=_progress_cb, ) as progress_bar: return await dsm.create_s3_export( - user_id, object_keys, progress_bar=progress_bar + user_id, + product_name, + object_keys, + progress_bar=progress_bar, ) @@ -121,13 +126,18 @@ async def export_data_as_download_link( task_key: TaskKey, *, user_id: UserID, + product_name: ProductName, paths_to_export: list[PathToExport], ) -> PresignedLink: """ AccessRightError: in case user can't access project """ s3_object = await export_data( - task=task, task_key=task_key, user_id=user_id, paths_to_export=paths_to_export + task=task, + task_key=task_key, + user_id=user_id, + product_name=product_name, + paths_to_export=paths_to_export, ) dsm = get_dsm_provider(get_app_server(task.app).app).get( @@ -145,6 +155,7 @@ async def search( task_key: TaskKey, *, user_id: UserID, + product_name: ProductName, project_id: ProjectID | None, name_pattern: str, modified_at: tuple[datetime.datetime | None, datetime.datetime | None] | None, @@ 
-162,7 +173,8 @@ async def search( assert isinstance(dsm, SimcoreS3DataManager) # nosec async for items in dsm.search( - user_id=user_id, + user_id, + product_name, project_id=project_id, name_pattern=name_pattern, modified_at=modified_at, diff --git a/services/storage/src/simcore_service_storage/api/rest/_datasets.py b/services/storage/src/simcore_service_storage/api/rest/_datasets.py index 76e6e1850683..ba93e48fcc1b 100644 --- a/services/storage/src/simcore_service_storage/api/rest/_datasets.py +++ b/services/storage/src/simcore_service_storage/api/rest/_datasets.py @@ -31,7 +31,7 @@ async def list_datasets_metadata( request: Request, ) -> Envelope[list[DatasetMetaDataGet]]: dsm = get_dsm_provider(request.app).get(location_id) - data = await dsm.list_datasets(query_params.user_id) + data = await dsm.list_datasets(query_params.user_id, query_params.product_name) return Envelope[list[DatasetMetaDataGet]]( data=[DatasetMetaDataGet(**d.model_dump()) for d in data] ) @@ -50,6 +50,7 @@ async def list_dataset_files_metadata( dsm = get_dsm_provider(request.app).get(location_id) data = await dsm.list_files_in_dataset( user_id=query_params.user_id, + product_name=query_params.product_name, dataset_id=dataset_id, expand_dirs=query_params.expand_dirs, ) diff --git a/services/storage/src/simcore_service_storage/api/rest/_files.py b/services/storage/src/simcore_service_storage/api/rest/_files.py index 8bb3e8ff7562..43d1582cb891 100644 --- a/services/storage/src/simcore_service_storage/api/rest/_files.py +++ b/services/storage/src/simcore_service_storage/api/rest/_files.py @@ -72,6 +72,7 @@ async def list_files_metadata( dsm = get_dsm_provider(request.app).get(location_id) data: list[FileMetaData] = await dsm.list_files( user_id=query_params.user_id, + product_name=query_params.product_name, expand_dirs=query_params.expand_dirs, uuid_filter=query_params.uuid_filter or f"{query_params.project_id or ''}", # NOTE: https://github.com/ITISFoundation/osparc-issues/issues/1593 @@ -114,7 +115,7 @@ async def get_file_metadata( ) if user_agent == "OpenAPI-Generator/0.1.0/python": - # NOTE: LEGACY compatiblity with API v0.1.0 + # NOTE: LEGACY compatibility with API v0.1.0 # SEE models used in sdk in: # https://github.com/ITISFoundation/osparc-simcore/blob/cfdf4f86d844ebb362f4f39e9c6571d561b72897/services/storage/client-sdk/python/simcore_service_storage_sdk/models/file_meta_data_enveloped.py#L34 # https://github.com/ITISFoundation/osparc-simcore/blob/cfdf4f86d844ebb362f4f39e9c6571d561b72897/services/storage/client-sdk/python/simcore_service_storage_sdk/models/file_meta_data_type.py#L34 diff --git a/services/storage/src/simcore_service_storage/api/rest/_paths.py b/services/storage/src/simcore_service_storage/api/rest/_paths.py index bdb5a171f0cc..98045741614b 100644 --- a/services/storage/src/simcore_service_storage/api/rest/_paths.py +++ b/services/storage/src/simcore_service_storage/api/rest/_paths.py @@ -9,6 +9,7 @@ PathTotalSizeCreate, ) from models_library.generics import Envelope +from models_library.products import ProductName from models_library.users import UserID from servicelib.fastapi.rest_pagination import ( CustomizedPathsCursorPage, @@ -16,7 +17,7 @@ ) from ...dsm_factory import BaseDataManager -from .dependencies.dsm_prodiver import get_data_manager +from .dependencies.dsm_provider import get_data_manager _logger = logging.getLogger(__name__) @@ -35,11 +36,13 @@ async def list_paths( page_params: Annotated[CustomizedPathsCursorPageParams, Depends()], dsm: Annotated[BaseDataManager, 
Depends(get_data_manager)], user_id: UserID, + product_name: ProductName, file_filter: Path | None = None, ): """Returns one level of files (paginated)""" items, next_cursor, total_number = await dsm.list_paths( - user_id=user_id, + user_id, + product_name, file_filter=file_filter, limit=page_params.size, cursor=page_params.to_raw_params().cursor, @@ -59,10 +62,12 @@ async def list_paths( async def compute_path_size( dsm: Annotated[BaseDataManager, Depends(get_data_manager)], user_id: UserID, + product_name: ProductName, path: Path, ): return Envelope[PathTotalSizeCreate]( data=PathTotalSizeCreate( - path=path, size=await dsm.compute_path_size(user_id, path=path) + path=path, + size=await dsm.compute_path_size(user_id, product_name, path=path), ) ) diff --git a/services/storage/src/simcore_service_storage/models.py b/services/storage/src/simcore_service_storage/models.py index bc4f24cc7dee..7d085b1cc114 100644 --- a/services/storage/src/simcore_service_storage/models.py +++ b/services/storage/src/simcore_service_storage/models.py @@ -19,6 +19,7 @@ S3BucketName, ) from models_library.basic_types import SHA256Str +from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import ( LocationID, @@ -196,6 +197,7 @@ class UploadLinks: class StorageQueryParamsBase(BaseModel): user_id: UserID + product_name: ProductName model_config = ConfigDict(populate_by_name=True) From e297cc36afecad9f270c21a36ce31bb7fc44b847 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 18 Dec 2025 10:43:04 +0100 Subject: [PATCH 05/35] fix typo --- .../api/rest/dependencies/{dsm_prodiver.py => dsm_provider.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename services/storage/src/simcore_service_storage/api/rest/dependencies/{dsm_prodiver.py => dsm_provider.py} (100%) diff --git a/services/storage/src/simcore_service_storage/api/rest/dependencies/dsm_prodiver.py b/services/storage/src/simcore_service_storage/api/rest/dependencies/dsm_provider.py similarity index 100% rename from services/storage/src/simcore_service_storage/api/rest/dependencies/dsm_prodiver.py rename to services/storage/src/simcore_service_storage/api/rest/dependencies/dsm_provider.py From 5406c903f46bd579691acfeec4780b2150d9af4f Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 18 Dec 2025 12:41:14 +0100 Subject: [PATCH 06/35] add product_name arg --- .../api/rest/_files.py | 12 +++- services/storage/tests/conftest.py | 13 ++-- .../tests/unit/test_handlers_simcore_s3.py | 15 +++-- .../storage/tests/unit/test_simcore_s3_dsm.py | 62 ++++++++++++------- .../unit/with_dbs/01/storage/conftest.py | 6 +- 5 files changed, 71 insertions(+), 37 deletions(-) diff --git a/services/storage/src/simcore_service_storage/api/rest/_files.py b/services/storage/src/simcore_service_storage/api/rest/_files.py index 43d1582cb891..7fc265fd0540 100644 --- a/services/storage/src/simcore_service_storage/api/rest/_files.py +++ b/services/storage/src/simcore_service_storage/api/rest/_files.py @@ -230,7 +230,9 @@ async def upload_file( ), encoded=True, ) - .with_query(user_id=query_params.user_id) + .with_query( + user_id=query_params.user_id, product_name=query_params.product_name + ) ) complete_url = ( @@ -246,7 +248,9 @@ async def upload_file( ), encoded=True, ) - .with_query(user_id=query_params.user_id) + .with_query( + user_id=query_params.user_id, product_name=query_params.product_name + ) ) v2_response = FileUploadSchema( @@ -322,7 +326,9 @@ async def complete_upload_file( ), 
encoded=True, ) - .with_query(user_id=query_params.user_id) + .with_query( + user_id=query_params.user_id, product_name=query_params.product_name + ) ) complete_task_state_url = f"{route}" diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index 3e96238bf5c4..5a78335f2c8d 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -39,6 +39,7 @@ UploadedPart, ) from models_library.basic_types import SHA256Str +from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes import NodeID from models_library.projects_nodes_io import LocationID, SimcoreS3FileID, StorageFileID @@ -327,6 +328,7 @@ async def create_upload_file_link_v2( initialized_app: FastAPI, client: httpx.AsyncClient, user_id: UserID, + product_name: ProductName, location_id: LocationID, ) -> AsyncIterator[Callable[..., Awaitable[FileUploadSchema]]]: file_params: list[tuple[UserID, int, SimcoreS3FileID]] = [] @@ -340,7 +342,7 @@ async def _link_creator( "upload_file", location_id=f"{location_id}", file_id=file_id, - ).with_query(**query_kwargs, user_id=user_id) + ).with_query(**query_kwargs, user_id=user_id, product_name=product_name) assert ( "file_size" in url.query ), "V2 call to upload file must contain file_size field!" @@ -655,6 +657,7 @@ async def delete_directory( initialized_app: FastAPI, client: httpx.AsyncClient, user_id: UserID, + product_name: ProductName, location_id: LocationID, ) -> Callable[[StorageFileID], Awaitable[None]]: async def _dir_remover(directory_s3: StorageFileID) -> None: @@ -664,7 +667,7 @@ async def _dir_remover(directory_s3: StorageFileID) -> None: "delete_file", location_id=f"{location_id}", file_id=directory_s3, - ).with_query(user_id=user_id) + ).with_query(user_id=user_id, product_name=product_name) response = await client.delete(f"{delete_url}") assert_status(response, status.HTTP_204_NO_CONTENT, None) @@ -673,7 +676,9 @@ async def _dir_remover(directory_s3: StorageFileID) -> None: # even if one file is left this will detect it list_files_metadata_url = url_from_operation_id( client, initialized_app, "list_files_metadata", location_id=f"{location_id}" - ).with_query(user_id=user_id, uuid_filter=directory_s3) + ).with_query( + user_id=user_id, product_name=product_name, uuid_filter=directory_s3 + ) response = await client.get(f"{list_files_metadata_url}") data, error = assert_status(response, status.HTTP_200_OK, list[FileMetaDataGet]) assert error is None @@ -1069,7 +1074,7 @@ async def storage_rabbitmq_rpc_client( @pytest.fixture def product_name(faker: Faker) -> str: - return faker.name() + return faker.word() @pytest.fixture diff --git a/services/storage/tests/unit/test_handlers_simcore_s3.py b/services/storage/tests/unit/test_handlers_simcore_s3.py index 6f361c1d7e98..c79050623a16 100644 --- a/services/storage/tests/unit/test_handlers_simcore_s3.py +++ b/services/storage/tests/unit/test_handlers_simcore_s3.py @@ -18,6 +18,7 @@ FileMetaDataGet, ) from models_library.basic_types import SHA256Str +from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import SimcoreS3FileID from models_library.users import UserID @@ -91,15 +92,21 @@ async def uploaded_file_ids( @pytest.fixture async def search_files_query_params( - query_params_choice: str, user_id: UserID + query_params_choice: str, user_id: UserID, product_name: ProductName ) -> SearchFilesQueryParams: match query_params_choice: 
case "default": - q = SearchFilesQueryParams(user_id=user_id, kind="owned") + q = SearchFilesQueryParams( + user_id=user_id, product_name=product_name, kind="owned" + ) case "limited": - q = SearchFilesQueryParams(user_id=user_id, kind="owned", limit=1) + q = SearchFilesQueryParams( + user_id=user_id, product_name=product_name, kind="owned", limit=1 + ) case "with_offset": - q = SearchFilesQueryParams(user_id=user_id, kind="owned", offset=1) + q = SearchFilesQueryParams( + user_id=user_id, product_name=product_name, kind="owned", offset=1 + ) case _: pytest.fail(f"Undefined {query_params_choice=}") return q diff --git a/services/storage/tests/unit/test_simcore_s3_dsm.py b/services/storage/tests/unit/test_simcore_s3_dsm.py index 389bad89e34e..68abcd187a3c 100644 --- a/services/storage/tests/unit/test_simcore_s3_dsm.py +++ b/services/storage/tests/unit/test_simcore_s3_dsm.py @@ -12,6 +12,7 @@ from aws_library.s3._models import S3ObjectKey from faker import Faker from models_library.basic_types import SHA256Str +from models_library.products import ProductName from models_library.progress_bar import ProgressReport from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, SimcoreS3FileID @@ -171,6 +172,7 @@ async def test_upload_and_search( async def _search_files_by_pattern( simcore_s3_dsm: SimcoreS3DataManager, user_id: UserID, + product_name: ProductName, name_pattern: str, project_id: ProjectID | None = None, items_per_page: int = 10, @@ -178,7 +180,8 @@ async def _search_files_by_pattern( """Helper function to search files and collect all results.""" results = [] async for page in simcore_s3_dsm.search( - user_id=user_id, + user_id, + product_name, name_pattern=name_pattern, project_id=project_id, limit=items_per_page, @@ -198,6 +201,7 @@ async def test_search_files( upload_file: Callable[..., Awaitable[tuple[Path, SimcoreS3FileID]]], file_size: ByteSize, user_id: UserID, + product_name: ProductName, project_id: ProjectID, faker: Faker, ): @@ -225,7 +229,7 @@ async def test_search_files( # Test 1: Search for all .txt files txt_results = await _search_files_by_pattern( - simcore_s3_dsm, user_id, "*.txt", project_id + simcore_s3_dsm, user_id, product_name, "*.txt", project_id ) assert ( len(txt_results) == 4 @@ -240,26 +244,28 @@ async def test_search_files( # Test 2: Search with specific prefix pattern data_results = await _search_files_by_pattern( - simcore_s3_dsm, user_id, "data_*", project_id + simcore_s3_dsm, user_id, product_name, "data_*", project_id ) assert len(data_results) == 1 assert data_results[0].file_name == "data_file.csv" # Test 3: Search with pattern that matches multiple extensions temp_results = await _search_files_by_pattern( - simcore_s3_dsm, user_id, "temp_*", project_id + simcore_s3_dsm, user_id, product_name, "temp_*", project_id ) assert len(temp_results) == 1 assert temp_results[0].file_name == "temp_data.tmp" # Test 4: Search with pattern that doesn't match anything no_match_results = await _search_files_by_pattern( - simcore_s3_dsm, user_id, "nonexistent_*", project_id + simcore_s3_dsm, user_id, product_name, "nonexistent_*", project_id ) assert len(no_match_results) == 0 # Test 5: Search without project_id restriction (all accessible projects) - all_results = await _search_files_by_pattern(simcore_s3_dsm, user_id, "*") + all_results = await _search_files_by_pattern( + simcore_s3_dsm, user_id, product_name, "*" + ) assert len(all_results) >= len(test_files) # Verify that each result has expected FileMetaData structure @@ 
-272,7 +278,7 @@ async def test_search_files( # Test 6: Test ? wildcard - single character match single_char_results = await _search_files_by_pattern( - simcore_s3_dsm, user_id, "file_?.log", project_id + simcore_s3_dsm, user_id, product_name, "file_?.log", project_id ) # Should find 2 files: file_a.log and file_b.log (but not file_10.log) assert len(single_char_results) == 2 @@ -281,7 +287,7 @@ async def test_search_files( # Test 7: Test ?? wildcard - two character match double_char_results = await _search_files_by_pattern( - simcore_s3_dsm, user_id, "file_??.log", project_id + simcore_s3_dsm, user_id, product_name, "file_??.log", project_id ) # Should find 1 file: file_10.log assert len(double_char_results) == 1 @@ -289,7 +295,7 @@ async def test_search_files( # Test 8: Test ? wildcard with specific prefix and suffix report_results = await _search_files_by_pattern( - simcore_s3_dsm, user_id, "report?.txt", project_id + simcore_s3_dsm, user_id, product_name, "report?.txt", project_id ) # Should find 2 files: report1.txt and report2.txt assert len(report_results) == 2 @@ -300,7 +306,8 @@ async def test_search_files( paginated_results = [] page_count = 0 async for page in simcore_s3_dsm.search( - user_id=user_id, + user_id, + product_name, name_pattern="*", project_id=project_id, limit=2, # Small page size to test pagination @@ -326,6 +333,7 @@ async def test_search_files_case_insensitive( upload_file: Callable[..., Awaitable[tuple[Path, SimcoreS3FileID]]], file_size: ByteSize, user_id: UserID, + product_name: ProductName, project_id: ProjectID, faker: Faker, ): @@ -346,7 +354,7 @@ async def test_search_files_case_insensitive( # Test case-insensitive extension matching case_insensitive_txt = await _search_files_by_pattern( - simcore_s3_dsm, user_id, "*.txt", project_id + simcore_s3_dsm, user_id, product_name, "*.txt", project_id ) txt_file_names = {file.file_name for file in case_insensitive_txt} assert "TestFile.TXT" in txt_file_names @@ -354,33 +362,33 @@ async def test_search_files_case_insensitive( # Test case-insensitive prefix matching case_insensitive_data = await _search_files_by_pattern( - simcore_s3_dsm, user_id, "data*", project_id + simcore_s3_dsm, user_id, product_name, "data*", project_id ) data_file_names = {file.file_name for file in case_insensitive_data} assert "DataFile.CSV" in data_file_names # Test mixed case pattern matching case_insensitive_config = await _search_files_by_pattern( - simcore_s3_dsm, user_id, "CONFIG*", project_id + simcore_s3_dsm, user_id, product_name, "CONFIG*", project_id ) config_file_names = {file.file_name for file in case_insensitive_config} assert "ConfigFile.JSON" in config_file_names assert "config.json" in config_file_names case_insensitive_xml = await _search_files_by_pattern( - simcore_s3_dsm, user_id, "*.XML", project_id + simcore_s3_dsm, user_id, product_name, "*.XML", project_id ) xml_file_names = {file.file_name for file in case_insensitive_xml} assert "XMLDataFile.xml" in xml_file_names camelcase_results = await _search_files_by_pattern( - simcore_s3_dsm, user_id, "camelcase*", project_id + simcore_s3_dsm, user_id, product_name, "camelcase*", project_id ) assert len(camelcase_results) == 1 assert camelcase_results[0].file_name == "CamelCaseFile.txt" pdf_results = await _search_files_by_pattern( - simcore_s3_dsm, user_id, "*.PDF", project_id + simcore_s3_dsm, user_id, product_name, "*.PDF", project_id ) pdf_file_names = {file.file_name for file in pdf_results} assert "MyDocument.PDF" in pdf_file_names @@ -456,6 +464,7 @@ async def 
_assert_meta_data_entries_count( async def test_create_s3_export( simcore_s3_dsm: SimcoreS3DataManager, user_id: UserID, + product_name: ProductName, paths_for_export: set[SimcoreS3FileID], sqlalchemy_async_engine: AsyncEngine, cleanup_files_closure: Callable[[SimcoreS3FileID], None], @@ -480,7 +489,10 @@ async def _progress_cb(report: ProgressReport) -> None: num_steps=1, description="data export", progress_report_cb=_progress_cb ) as root_progress_bar: file_id = await simcore_s3_dsm.create_s3_export( - user_id, selection_to_export, progress_bar=root_progress_bar + user_id, + product_name, + selection_to_export, + progress_bar=root_progress_bar, ) cleanup_files_closure(file_id) # count=2 -> the directory and the .zip export @@ -513,6 +525,7 @@ async def test_create_s3_export_abort_upload_upon_error( mock_create_and_upload_export_raises_error: None, simcore_s3_dsm: SimcoreS3DataManager, user_id: UserID, + product_name: ProductName, sqlalchemy_async_engine: AsyncEngine, ): await _assert_meta_data_entries_count(sqlalchemy_async_engine, count=0) @@ -521,7 +534,7 @@ async def test_create_s3_export_abort_upload_upon_error( num_steps=1, description="data export" ) as progress_bar: await simcore_s3_dsm.create_s3_export( - user_id, [], progress_bar=progress_bar + user_id, product_name, [], progress_bar=progress_bar ) await _assert_meta_data_entries_count(sqlalchemy_async_engine, count=0) @@ -543,6 +556,7 @@ async def test_search_directories( upload_file: Callable[..., Awaitable[tuple[Path, SimcoreS3FileID]]], file_size: ByteSize, user_id: UserID, + product_name: ProductName, project_id: ProjectID, node_id: NodeID, faker: Faker, @@ -585,7 +599,7 @@ async def test_search_directories( # Test 1: Search for directories with "test_dir" pattern dir_results = await _search_files_by_pattern( - simcore_s3_dsm, user_id, "test_dir*", project_id + simcore_s3_dsm, user_id, product_name, "test_dir*", project_id ) # Should find 2 directories: test_dir_1 and test_dir_2 assert len(dir_results) == 2 @@ -594,7 +608,7 @@ async def test_search_directories( # Test 2: Search for directories with "_dir" suffix dir_results = await _search_files_by_pattern( - simcore_s3_dsm, user_id, "*_dir", project_id + simcore_s3_dsm, user_id, product_name, "*_dir", project_id ) assert len(dir_results) == 1 # test_dir_1, test_dir_2, config_dir dir_names = {f.file_name for f in dir_results if f.is_directory} @@ -602,7 +616,7 @@ async def test_search_directories( # Test 3: Search for directories with "folder" in name folder_results = await _search_files_by_pattern( - simcore_s3_dsm, user_id, "*folder*", project_id + simcore_s3_dsm, user_id, product_name, "*folder*", project_id ) assert len(folder_results) == 2 # data_folder, temp_folder dir_names = {f.file_name for f in folder_results} @@ -610,7 +624,7 @@ async def test_search_directories( # Test 4: Search with pattern that matches both files and directories data_results = await _search_files_by_pattern( - simcore_s3_dsm, user_id, "data_*", project_id + simcore_s3_dsm, user_id, product_name, "data_*", project_id ) # Should find both data_folder (directory) and data_document.pdf (file) assert len(data_results) >= 2 @@ -622,7 +636,7 @@ async def test_search_directories( # Test 5: Search for backup pattern (should find both directory and file) backup_results = await _search_files_by_pattern( - simcore_s3_dsm, user_id, "backup_*", project_id + simcore_s3_dsm, user_id, product_name, "backup_*", project_id ) assert len(backup_results) >= 2 # Should find backup_directory/ and backup_config.json 
@@ -633,6 +647,6 @@ async def test_search_directories( # Test 6: Search for subdirectories subdir_results = await _search_files_by_pattern( - simcore_s3_dsm, user_id, "*subdir_*", project_id + simcore_s3_dsm, user_id, product_name, "*subdir_*", project_id ) assert len(subdir_results) == 1 # Only subdir_a diff --git a/services/web/server/tests/unit/with_dbs/01/storage/conftest.py b/services/web/server/tests/unit/with_dbs/01/storage/conftest.py index 185ad54855ed..3a3e177a9729 100644 --- a/services/web/server/tests/unit/with_dbs/01/storage/conftest.py +++ b/services/web/server/tests/unit/with_dbs/01/storage/conftest.py @@ -30,6 +30,7 @@ PathMetaDataGet, ) from models_library.generics import Envelope +from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import LocationID, StorageFileID from models_library.users import UserID @@ -186,6 +187,7 @@ async def _list_dataset_files_metadata(user_id: UserID, request: Request): ) async def upload_file( user_id: UserID, + product_name: ProductName, location_id: LocationID, file_id: StorageFileID, request: Request, @@ -206,7 +208,7 @@ async def upload_file( ), encoded=True, ) - .with_query(user_id=user_id) + .with_query(user_id=user_id, product_name=product_name) ) complete_url = ( @@ -222,7 +224,7 @@ async def upload_file( ), encoded=True, ) - .with_query(user_id=user_id) + .with_query(user_id=user_id, product_name=product_name) ) response = FileUploadSchema.model_validate( random.choice( # noqa: S311 From 4cc1cde32c732443acb8fa1365b65db1d7d80d8b Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 18 Dec 2025 13:26:16 +0100 Subject: [PATCH 07/35] add product_name query param --- .../server/src/simcore_service_webserver/storage/_rest.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/storage/_rest.py b/services/web/server/src/simcore_service_webserver/storage/_rest.py index 86a5262bd323..e25a88630dce 100644 --- a/services/web/server/src/simcore_service_webserver/storage/_rest.py +++ b/services/web/server/src/simcore_service_webserver/storage/_rest.py @@ -62,6 +62,7 @@ from .._meta import API_VTAG from ..celery import get_task_manager +from ..constants import RQ_PRODUCT_KEY from ..login.decorators import login_required from ..models import AuthenticatedRequestContext, WebServerOwnerMetadata from ..rabbitmq import get_rabbitmq_rpc_client @@ -86,7 +87,8 @@ def _get_storage_vtag(app: web.Application) -> str: def _to_storage_url(request: web.Request) -> URL: """Converts web-api url to storage-api url""" - userid = request[RQT_USERID_KEY] + user_id = request[RQT_USERID_KEY] + product_name = request[RQ_PRODUCT_KEY] # storage service API endpoint url = _get_base_storage_url(request.app) @@ -106,7 +108,7 @@ def _to_storage_url(request: web.Request) -> URL: return ( url.joinpath(fastapi_encoded_suffix, encoded=True) .with_query({camel_to_snake(k): v for k, v in request.query.items()}) - .update_query(user_id=userid) + .update_query(user_id=user_id, product_name=product_name) ) From 202bf3d1a9d4acb4a34123f2b0af56cfe40645d1 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 18 Dec 2025 13:41:20 +0100 Subject: [PATCH 08/35] fix missing query param --- services/storage/tests/unit/test_handlers_paths.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/services/storage/tests/unit/test_handlers_paths.py b/services/storage/tests/unit/test_handlers_paths.py index 
0fac7c5deb2c..0a0fc91b6edb 100644 --- a/services/storage/tests/unit/test_handlers_paths.py +++ b/services/storage/tests/unit/test_handlers_paths.py @@ -23,6 +23,7 @@ PathTotalSizeCreate, ) from models_library.api_schemas_webserver.storage import MAX_NUMBER_OF_PATHS_PER_PAGE +from models_library.products import ProductName from models_library.projects_nodes_io import LocationID, NodeID, SimcoreS3FileID from models_library.users import UserID from pydantic import ByteSize, TypeAdapter @@ -577,6 +578,7 @@ async def _assert_compute_path_size( client: httpx.AsyncClient, location_id: LocationID, user_id: UserID, + product_name: ProductName, *, path: Path, expected_total_size: int, @@ -587,7 +589,7 @@ async def _assert_compute_path_size( "compute_path_size", location_id=f"{location_id}", path=f"{path}", - ).with_query(user_id=user_id) + ).with_query(user_id=user_id, product_name=product_name) response = await client.post(f"{url}") received, _ = assert_status( @@ -623,6 +625,7 @@ async def test_path_compute_size( client: httpx.AsyncClient, location_id: LocationID, user_id: UserID, + product_name: ProductName, with_random_project_with_files: tuple[ dict[str, Any], dict[NodeID, dict[SimcoreS3FileID, FileIDDict]], @@ -646,6 +649,7 @@ async def test_path_compute_size( client, location_id, user_id, + product_name, path=path, expected_total_size=expected_total_size, ) @@ -664,6 +668,7 @@ async def test_path_compute_size( client, location_id, user_id, + product_name, path=path, expected_total_size=expected_total_size, ) @@ -683,6 +688,7 @@ async def test_path_compute_size( client, location_id, user_id, + product_name, path=path, expected_total_size=expected_total_size, ) @@ -702,6 +708,7 @@ async def test_path_compute_size( client, location_id, user_id, + product_name, path=path, expected_total_size=expected_total_size, ) @@ -727,6 +734,7 @@ async def test_path_compute_size( client, location_id, user_id, + product_name, path=workspace_subfolder, expected_total_size=expected_total_size, ) @@ -739,6 +747,7 @@ async def test_path_compute_size_inexistent_path( client: httpx.AsyncClient, location_id: LocationID, user_id: UserID, + product_name: ProductName, faker: Faker, fake_datcore_tokens: tuple[str, str], ): @@ -747,6 +756,7 @@ async def test_path_compute_size_inexistent_path( client, location_id, user_id, + product_name=product_name, path=Path(faker.file_path(absolute=False)), expected_total_size=0, ) From 43711897d62a8f00d0b467bec257df74addf0f7d Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 18 Dec 2025 13:57:34 +0100 Subject: [PATCH 09/35] add product_name query param --- services/storage/tests/unit/test_handlers_simcore_s3.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/services/storage/tests/unit/test_handlers_simcore_s3.py b/services/storage/tests/unit/test_handlers_simcore_s3.py index c79050623a16..1b37479e0dbd 100644 --- a/services/storage/tests/unit/test_handlers_simcore_s3.py +++ b/services/storage/tests/unit/test_handlers_simcore_s3.py @@ -162,6 +162,7 @@ async def test_search_files( initialized_app: FastAPI, client: httpx.AsyncClient, user_id: UserID, + product_name: ProductName, upload_file: Callable[..., Awaitable[tuple[Path, SimcoreS3FileID]]], faker: Faker, search_startswith: bool, @@ -175,6 +176,7 @@ async def test_search_files( jsonable_encoder( { "user_id": user_id, + "product_name": product_name, "kind": kind, }, exclude_none=True, From 66e9cd4c9f940b4a82d81d069faf19e845dc6b64 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 18 Dec 2025 14:02:55 +0100 Subject: 
[PATCH 10/35] add product_name query param --- .../storage/tests/unit/test_handlers_simcore_s3.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/services/storage/tests/unit/test_handlers_simcore_s3.py b/services/storage/tests/unit/test_handlers_simcore_s3.py index 1b37479e0dbd..75596df6ed77 100644 --- a/services/storage/tests/unit/test_handlers_simcore_s3.py +++ b/services/storage/tests/unit/test_handlers_simcore_s3.py @@ -43,7 +43,7 @@ async def test_simcore_s3_access_returns_default( ): url = url_from_operation_id( client, initialized_app, "get_or_create_temporary_s3_access" - ).with_query(user_id=1) + ).with_query(user_id=1, product_name="test-product") response = await client.post(f"{url}") received_settings, error = assert_status(response, status.HTTP_200_OK, S3Settings) @@ -56,6 +56,7 @@ async def test_connect_to_external( initialized_app: FastAPI, client: httpx.AsyncClient, user_id: UserID, + product_name: ProductName, project_id: ProjectID, ): url = url_from_operation_id( @@ -63,9 +64,13 @@ async def test_connect_to_external( initialized_app, "list_files_metadata", location_id=f"{SimcoreS3DataManager.get_location_id()}", - ).with_query(user_id=f"{user_id}", uuid_filter=f"{project_id}") + ).with_query( + user_id=f"{user_id}", + product_name=f"{product_name}", + uuid_filter=f"{project_id}", + ) resp = await client.get(f"{url}") - data, error = assert_status(resp, status.HTTP_200_OK, list[FileMetaDataGet]) + data, _ = assert_status(resp, status.HTTP_200_OK, list[FileMetaDataGet]) print(data) From dd6b7429c43c115229d868a0ecada49bfef09efd Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 18 Dec 2025 14:51:40 +0100 Subject: [PATCH 11/35] add product_name query param --- services/storage/tests/conftest.py | 3 +- .../storage/tests/unit/test_handlers_files.py | 57 +++++++++++++------ .../storage/tests/unit/test_handlers_paths.py | 24 ++++++++ 3 files changed, 65 insertions(+), 19 deletions(-) diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index 5a78335f2c8d..6a1a58a87745 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -301,6 +301,7 @@ async def get_file_meta_data( initialized_app: FastAPI, client: httpx.AsyncClient, user_id: UserID, + product_name: ProductName, location_id: LocationID, ) -> Callable[..., Awaitable[FileMetaDataGet]]: async def _getter(file_id: SimcoreS3FileID) -> FileMetaDataGet: @@ -310,7 +311,7 @@ async def _getter(file_id: SimcoreS3FileID) -> FileMetaDataGet: "get_file_metadata", location_id=f"{location_id}", file_id=file_id, - ).with_query(user_id=user_id) + ).with_query(user_id=user_id, product_name=product_name) response = await client.get(f"{url}") received_fmd, error = assert_status( diff --git a/services/storage/tests/unit/test_handlers_files.py b/services/storage/tests/unit/test_handlers_files.py index 85a11deb9659..d00217dbde8b 100644 --- a/services/storage/tests/unit/test_handlers_files.py +++ b/services/storage/tests/unit/test_handlers_files.py @@ -36,6 +36,7 @@ PresignedLink, SoftCopyBody, ) +from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import LocationID, NodeID, SimcoreS3FileID from models_library.users import UserID @@ -200,6 +201,7 @@ async def create_upload_file_link_v1( initialized_app: FastAPI, client: httpx.AsyncClient, user_id: UserID, + product_name: ProductName, location_id: LocationID, ) -> AsyncIterator[Callable[..., Awaitable[PresignedLink]]]: 
file_params: list[tuple[UserID, int, SimcoreS3FileID]] = [] @@ -211,7 +213,7 @@ async def _link_creator(file_id: SimcoreS3FileID, **query_kwargs) -> PresignedLi "upload_file", location_id=f"{location_id}", file_id=file_id, - ).with_query(**query_kwargs, user_id=user_id) + ).with_query(**query_kwargs, user_id=user_id, product_name=product_name) assert ( "file_size" not in url.query ), "v1 call to upload_file MUST NOT contain file_size field, this is reserved for v2 call" @@ -881,6 +883,7 @@ async def test_download_file_no_file_was_uploaded( project_id: ProjectID, node_id: NodeID, user_id: UserID, + product_name: ProductName, storage_s3_client: SimcoreS3API, storage_s3_bucket: S3BucketName, fake_datcore_tokens: tuple[str, str], @@ -900,7 +903,7 @@ async def test_download_file_no_file_was_uploaded( "download_file", location_id=f"{location_id}", file_id=missing_file, - ).with_query(user_id=user_id) + ).with_query(user_id=user_id, product_name=product_name) response = await client.get(f"{download_url}") data, error = assert_status(response, status.HTTP_404_NOT_FOUND, None) @@ -922,6 +925,7 @@ async def test_download_file_1_to_1_with_file_meta_data( upload_file: Callable[[ByteSize, str], Awaitable[tuple[Path, SimcoreS3FileID]]], location_id: LocationID, user_id: UserID, + product_name: ProductName, storage_s3_client: SimcoreS3API, storage_s3_bucket: S3BucketName, tmp_path: Path, @@ -945,7 +949,7 @@ async def test_download_file_1_to_1_with_file_meta_data( "download_file", location_id=f"{location_id}", file_id=uploaded_file_uuid, - ).with_query(user_id=user_id) + ).with_query(user_id=user_id, product_name=product_name) response = await client.get(f"{download_url}") data, error = assert_status(response, status.HTTP_200_OK, FileDownloadResponse) assert not error @@ -967,6 +971,7 @@ async def test_download_file_from_inside_a_directory( file_size: ByteSize, location_id: LocationID, user_id: UserID, + product_name: ProductName, project_id: ProjectID, node_id: NodeID, create_empty_directory: Callable[ @@ -1010,7 +1015,7 @@ async def test_download_file_from_inside_a_directory( "download_file", location_id=f"{location_id}", file_id=s3_file_id, - ).with_query(user_id=user_id) + ).with_query(user_id=user_id, product_name=product_name) response = await client.get(f"{download_url}") file_download, error = assert_status( response, status.HTTP_200_OK, FileDownloadResponse @@ -1034,6 +1039,7 @@ async def test_download_file_the_file_is_missing_from_the_directory( client: httpx.AsyncClient, location_id: LocationID, user_id: UserID, + product_name: ProductName, project_id: ProjectID, node_id: NodeID, create_empty_directory: Callable[ @@ -1054,7 +1060,7 @@ async def test_download_file_the_file_is_missing_from_the_directory( "download_file", location_id=f"{location_id}", file_id=missing_s3_file_id, - ).with_query(user_id=user_id) + ).with_query(user_id=user_id, product_name=product_name) response = await client.get(f"{download_url}") data, error = assert_status(response, status.HTTP_404_NOT_FOUND, None) @@ -1073,6 +1079,7 @@ async def test_download_file_access_rights( client: httpx.AsyncClient, location_id: LocationID, user_id: UserID, + product_name: ProductName, storage_s3_client: SimcoreS3API, storage_s3_bucket: S3BucketName, faker: Faker, @@ -1093,7 +1100,7 @@ async def test_download_file_access_rights( "download_file", location_id=f"{location_id}", file_id=missing_file, - ).with_query(user_id=user_id) + ).with_query(user_id=user_id, product_name=product_name) response = await client.get(f"{download_url}") data, 
error = assert_status(response, status.HTTP_403_FORBIDDEN, None) @@ -1124,6 +1131,7 @@ async def test_delete_file( upload_file: Callable[[ByteSize, str], Awaitable[tuple[Path, SimcoreS3FileID]]], location_id: LocationID, user_id: UserID, + product_name: ProductName, faker: Faker, ): _, uploaded_file_uuid = await upload_file(file_size, faker.file_name()) @@ -1134,7 +1142,7 @@ async def test_delete_file( "delete_file", location_id=f"{location_id}", file_id=uploaded_file_uuid, - ).with_query(user_id=user_id) + ).with_query(user_id=user_id, product_name=product_name) response = await client.delete(f"{delete_url}") assert_status(response, status.HTTP_204_NO_CONTENT, None) @@ -1165,6 +1173,7 @@ async def test_copy_as_soft_link( initialized_app: FastAPI, client: httpx.AsyncClient, user_id: UserID, + product_name: ProductName, project_id: ProjectID, node_id: NodeID, upload_file: Callable[[ByteSize, str], Awaitable[tuple[Path, SimcoreS3FileID]]], @@ -1179,14 +1188,14 @@ async def test_copy_as_soft_link( initialized_app, "copy_as_soft_link", file_id=missing_file_uuid, - ).with_query(user_id=user_id) + ).with_query(user_id=user_id, product_name=product_name) response = await client.post( f"{url}", json=jsonable_encoder(SoftCopyBody(link_id=invalid_link_id)) ) assert_status(response, status.HTTP_404_NOT_FOUND, None) # now let's try with whatever link id - file, original_file_uuid = await upload_file( + _, original_file_uuid = await upload_file( TypeAdapter(ByteSize).validate_python("10Mib"), faker.file_name() ) url = url_from_operation_id( @@ -1194,7 +1203,7 @@ async def test_copy_as_soft_link( initialized_app, "copy_as_soft_link", file_id=original_file_uuid, - ).with_query(user_id=user_id) + ).with_query(user_id=user_id, product_name=product_name) link_id = TypeAdapter(SimcoreS3FileID).validate_python( f"api/{node_id}/{faker.file_name()}" @@ -1212,6 +1221,7 @@ async def _list_files( initialized_app: FastAPI, client: httpx.AsyncClient, user_id: UserID, + product_name: ProductName, location_id: LocationID, *, expand_dirs: bool, @@ -1221,7 +1231,9 @@ async def _list_files( initialized_app, "list_files_metadata", location_id=f"{location_id}", - ).with_query(user_id=user_id, expand_dirs=f"{expand_dirs}".lower()) + ).with_query( + user_id=user_id, product_name=product_name, expand_dirs=f"{expand_dirs}".lower() + ) response = await client.get(f"{get_url}") fmds, error = assert_status(response, status.HTTP_200_OK, list[FileMetaDataGet]) assert not error @@ -1233,12 +1245,14 @@ async def _list_files_legacy( initialized_app: FastAPI, client: httpx.AsyncClient, user_id: UserID, + product_name: ProductName, location_id: LocationID, ) -> list[FileMetaDataGet]: return await _list_files( initialized_app, client, user_id, + product_name, location_id, expand_dirs=True, ) @@ -1248,12 +1262,14 @@ async def _list_files_and_directories( initialized_app: FastAPI, client: httpx.AsyncClient, user_id: UserID, + product_name: ProductName, location_id: LocationID, ) -> list[FileMetaDataGet]: return await _list_files( initialized_app, client, user_id, + product_name, location_id, expand_dirs=False, ) @@ -1283,6 +1299,7 @@ async def test_is_directory_link_forces_link_type_and_size( client: httpx.AsyncClient, location_id: LocationID, user_id: UserID, + product_name: ProductName, link_type: LinkType, file_size: ByteSize, ): @@ -1298,7 +1315,7 @@ async def test_is_directory_link_forces_link_type_and_size( assert len(directory_file_upload.urls) == 1 files_and_directories: list[FileMetaDataGet] = await _list_files_and_directories( - 
initialized_app, client, user_id, location_id + initialized_app, client, user_id, product_name, location_id ) assert len(files_and_directories) == 1 assert files_and_directories[0].is_directory is True @@ -1357,6 +1374,7 @@ async def test_upload_file_is_directory_and_remove_content( client: httpx.AsyncClient, location_id: LocationID, user_id: UserID, + product_name: ProductName, project_id: ProjectID, node_id: NodeID, ): @@ -1375,7 +1393,7 @@ async def test_upload_file_is_directory_and_remove_content( assert len(files_and_directories) == 1 list_of_files: list[FileMetaDataGet] = await _list_files_legacy( - initialized_app, client, user_id, location_id + initialized_app, client, user_id, product_name, location_id ) assert len(list_of_files) == 0 @@ -1396,7 +1414,7 @@ async def test_upload_file_is_directory_and_remove_content( assert len(files_and_directories) == 1 list_of_files: list[FileMetaDataGet] = await _list_files_legacy( - initialized_app, client, user_id, location_id + initialized_app, client, user_id, product_name, location_id ) assert len(list_of_files) == FILE_COUNT @@ -1414,7 +1432,7 @@ async def test_upload_file_is_directory_and_remove_content( assert error is None list_of_files: list[FileMetaDataGet] = await _list_files_legacy( - initialized_app, client, user_id, location_id + initialized_app, client, user_id, product_name, location_id ) assert len(list_of_files) == FILE_COUNT @@ -1433,7 +1451,7 @@ async def test_upload_file_is_directory_and_remove_content( assert error is None list_of_files = await _list_files_legacy( - initialized_app, client, user_id, location_id + initialized_app, client, user_id, product_name, location_id ) assert len(list_of_files) == FILE_COUNT - 1 @@ -1443,7 +1461,7 @@ async def test_upload_file_is_directory_and_remove_content( await delete_directory(directory_in_s3) list_of_files = await _list_files_legacy( - initialized_app, client, user_id, location_id + initialized_app, client, user_id, product_name, location_id ) assert len(list_of_files) == 0 @@ -1471,6 +1489,7 @@ async def test_listing_more_than_1000_objects_in_bucket( client: httpx.AsyncClient, location_id: LocationID, user_id: UserID, + product_name: ProductName, project_id: ProjectID, node_id: NodeID, files_count: int, @@ -1485,7 +1504,7 @@ async def test_listing_more_than_1000_objects_in_bucket( node_id, ) list_of_files = await _list_files_legacy( - initialized_app, client, user_id, location_id + initialized_app, client, user_id, product_name, location_id ) # for now no more than 1000 objects will be returned assert len(list_of_files) == 1000 @@ -1514,6 +1533,7 @@ async def test_listing_with_project_id_filter( client: httpx.AsyncClient, location_id: LocationID, user_id: UserID, + product_name: ProductName, faker: Faker, random_project_with_files: Callable[ [ProjectWithFilesParams], @@ -1535,6 +1555,7 @@ async def test_listing_with_project_id_filter( query = { "user_id": user_id, + "product_name": product_name, "project_id": f"{project_id}", "uuid_filter": project_file_name if uuid_filter else None, } diff --git a/services/storage/tests/unit/test_handlers_paths.py b/services/storage/tests/unit/test_handlers_paths.py index 0a0fc91b6edb..2293f7726006 100644 --- a/services/storage/tests/unit/test_handlers_paths.py +++ b/services/storage/tests/unit/test_handlers_paths.py @@ -63,6 +63,7 @@ async def _assert_list_paths( client: httpx.AsyncClient, location_id: LocationID, user_id: UserID, + product_name: ProductName, *, file_filter: Path | None, limit: int = 25, @@ -79,6 +80,7 @@ async def 
_assert_list_paths( client, initialized_app, "list_paths", location_id=f"{location_id}" ).with_query( user_id=user_id, + product_name=product_name, size=limit, ) if next_cursor: @@ -124,6 +126,7 @@ async def test_list_paths_root_folder_of_empty_returns_nothing( client: httpx.AsyncClient, location_id: LocationID, user_id: UserID, + product_name: ProductName, fake_datcore_tokens: tuple[str, str], ): await _assert_list_paths( @@ -131,6 +134,7 @@ async def test_list_paths_root_folder_of_empty_returns_nothing( client, location_id, user_id, + product_name, file_filter=None, expected_paths=[], ) @@ -158,6 +162,7 @@ async def test_list_paths_pagination( client: httpx.AsyncClient, location_id: LocationID, user_id: UserID, + product_name: ProductName, with_random_project_with_files: tuple[ dict[str, Any], dict[NodeID, dict[SimcoreS3FileID, FileIDDict]], @@ -177,6 +182,7 @@ async def test_list_paths_pagination( client, location_id, user_id, + product_name, file_filter=file_filter, expected_paths=expected_paths, limit=int(num_nodes / 2 + 0.5), @@ -197,6 +203,7 @@ async def test_list_paths_pagination( client, location_id, user_id, + product_name, file_filter=workspace_file_filter, expected_paths=expected_paths, limit=1, @@ -213,6 +220,7 @@ async def test_list_paths_pagination( client, location_id, user_id, + product_name, file_filter=selected_path_filter[0], expected_paths=expected_paths, check_total=False, @@ -241,6 +249,7 @@ async def test_list_paths_pagination_large_page( client: httpx.AsyncClient, location_id: LocationID, user_id: UserID, + product_name: ProductName, with_random_project_with_files: tuple[ dict[str, Any], dict[NodeID, dict[SimcoreS3FileID, FileIDDict]], @@ -260,6 +269,7 @@ async def test_list_paths_pagination_large_page( client, location_id, user_id, + product_name, file_filter=workspace_file_filter, expected_paths=expected_paths, check_total=False, @@ -292,6 +302,7 @@ async def test_list_paths( client: httpx.AsyncClient, location_id: LocationID, user_id: UserID, + product_name: ProductName, random_project_with_files: Callable[ [ProjectWithFilesParams], Awaitable[ @@ -316,6 +327,7 @@ async def test_list_paths( client, location_id, user_id, + product_name, file_filter=None, expected_paths=expected_paths, ) @@ -336,6 +348,7 @@ async def test_list_paths( client, location_id, user_id, + product_name, file_filter=partial_file_filter, expected_paths=partial_expected_paths, ) @@ -351,6 +364,7 @@ async def test_list_paths( client, location_id, user_id, + product_name, file_filter=file_filter, expected_paths=expected_paths, ) @@ -372,6 +386,7 @@ async def test_list_paths( client, location_id, user_id, + product_name, file_filter=file_filter, expected_paths=expected_node_files, ) @@ -386,6 +401,7 @@ async def test_list_paths( client, location_id, user_id, + product_name, file_filter=node_outputs_file_filter, expected_paths=expected_paths, ) @@ -400,6 +416,7 @@ async def test_list_paths( client, location_id, user_id, + product_name, file_filter=node_outputs_file_filter, expected_paths=expected_paths, ) @@ -414,6 +431,7 @@ async def test_list_paths( client, location_id, user_id, + product_name, file_filter=workspace_file_filter, expected_paths=expected_paths, check_total=False, @@ -429,6 +447,7 @@ async def test_list_paths( client, location_id, user_id, + product_name, file_filter=selected_path_filter[0], expected_paths=expected_paths, check_total=False, @@ -457,6 +476,7 @@ async def test_list_paths_with_display_name_containing_slashes( client: httpx.AsyncClient, location_id: LocationID, 
user_id: UserID, + product_name: ProductName, with_random_project_with_files: tuple[ dict[str, Any], dict[NodeID, dict[SimcoreS3FileID, FileIDDict]], @@ -497,6 +517,7 @@ async def test_list_paths_with_display_name_containing_slashes( client, location_id, user_id, + product_name, file_filter=file_filter, expected_paths=expected_paths, ) @@ -517,6 +538,7 @@ async def test_list_paths_with_display_name_containing_slashes( client, location_id, user_id, + product_name, file_filter=file_filter, expected_paths=expected_paths, ) @@ -540,6 +562,7 @@ async def test_list_paths_with_display_name_containing_slashes( client, location_id, user_id, + product_name, file_filter=workspace_file_filter, expected_paths=expected_paths, check_total=False, @@ -556,6 +579,7 @@ async def test_list_paths_with_display_name_containing_slashes( client, location_id, user_id, + product_name, file_filter=selected_path_filter[0], expected_paths=expected_paths, check_total=False, From 645d0c686c372b4b524db8317289d333f26603b5 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 18 Dec 2025 14:57:32 +0100 Subject: [PATCH 12/35] fix test --- services/storage/tests/unit/test_handlers_files.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/storage/tests/unit/test_handlers_files.py b/services/storage/tests/unit/test_handlers_files.py index d00217dbde8b..80813cf8c998 100644 --- a/services/storage/tests/unit/test_handlers_files.py +++ b/services/storage/tests/unit/test_handlers_files.py @@ -1388,7 +1388,7 @@ async def test_upload_file_is_directory_and_remove_content( directory_in_s3 = await create_empty_directory(DIR_NAME, project_id, node_id) files_and_directories: list[FileMetaDataGet] = await _list_files_and_directories( - initialized_app, client, user_id, location_id + initialized_app, client, user_id, product_name, location_id ) assert len(files_and_directories) == 1 @@ -1409,7 +1409,7 @@ async def test_upload_file_is_directory_and_remove_content( ) files_and_directories: list[FileMetaDataGet] = await _list_files_and_directories( - initialized_app, client, user_id, location_id + initialized_app, client, user_id, product_name, location_id ) assert len(files_and_directories) == 1 @@ -1466,7 +1466,7 @@ async def test_upload_file_is_directory_and_remove_content( assert len(list_of_files) == 0 files_and_directories = await _list_files_and_directories( - initialized_app, client, user_id, location_id + initialized_app, client, user_id, product_name, location_id ) assert len(files_and_directories) == 0 From e9c6873f14d22d03cff1c8384d0aa78a0a226156 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 18 Dec 2025 15:23:28 +0100 Subject: [PATCH 13/35] add product_name arg --- .../rabbitmq/rpc_interfaces/storage/paths.py | 9 +++++++-- .../src/simcore_service_storage/api/rpc/_paths.py | 5 +++++ .../storage/tests/unit/test_rpc_handlers_paths.py | 13 +++++++------ 3 files changed, 19 insertions(+), 8 deletions(-) diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/storage/paths.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/storage/paths.py index c03be37d3937..3f7413ce20da 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/storage/paths.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/storage/paths.py @@ -4,6 +4,7 @@ AsyncJobGet, ) from models_library.api_schemas_storage import STORAGE_RPC_NAMESPACE +from models_library.products import ProductName from models_library.projects_nodes_io import LocationID 
from models_library.rabbitmq_basic_types import RPCMethodName from models_library.users import UserID @@ -20,7 +21,8 @@ async def compute_path_size( location_id: LocationID, path: Path, owner_metadata: OwnerMetadata, - user_id: UserID + user_id: UserID, + product_name: ProductName, ) -> tuple[AsyncJobGet, OwnerMetadata]: async_job_rpc_get = await submit( rabbitmq_rpc_client=client, @@ -30,6 +32,7 @@ async def compute_path_size( location_id=location_id, path=path, user_id=user_id, + product_name=product_name, ) return async_job_rpc_get, owner_metadata @@ -40,7 +43,8 @@ async def delete_paths( location_id: LocationID, paths: set[Path], owner_metadata: OwnerMetadata, - user_id: UserID + user_id: UserID, + product_name: ProductName, ) -> tuple[AsyncJobGet, OwnerMetadata]: async_job_rpc_get = await submit( rabbitmq_rpc_client=client, @@ -50,5 +54,6 @@ async def delete_paths( location_id=location_id, paths=paths, user_id=user_id, + product_name=product_name, ) return async_job_rpc_get, owner_metadata diff --git a/services/storage/src/simcore_service_storage/api/rpc/_paths.py b/services/storage/src/simcore_service_storage/api/rpc/_paths.py index 31aaa1fdc67e..65ffc3aa0c4b 100644 --- a/services/storage/src/simcore_service_storage/api/rpc/_paths.py +++ b/services/storage/src/simcore_service_storage/api/rpc/_paths.py @@ -4,6 +4,7 @@ from models_library.api_schemas_rpc_async_jobs.async_jobs import ( AsyncJobGet, ) +from models_library.products import ProductName from models_library.projects_nodes_io import LocationID from models_library.users import UserID from servicelib.celery.models import ExecutionMetadata, OwnerMetadata @@ -24,6 +25,7 @@ async def compute_path_size( location_id: LocationID, path: Path, user_id: UserID, + product_name: ProductName, ) -> AsyncJobGet: task_name = remote_compute_path_size.__name__ task_uuid = await task_manager.submit_task( @@ -32,6 +34,7 @@ async def compute_path_size( ), owner_metadata=owner_metadata, user_id=user_id, + product_name=product_name, location_id=location_id, path=path, ) @@ -46,6 +49,7 @@ async def delete_paths( location_id: LocationID, paths: set[Path], user_id: UserID, + product_name: ProductName, ) -> AsyncJobGet: task_name = remote_delete_paths.__name__ task_uuid = await task_manager.submit_task( @@ -54,6 +58,7 @@ async def delete_paths( ), owner_metadata=owner_metadata, user_id=user_id, + product_name=product_name, location_id=location_id, paths=paths, ) diff --git a/services/storage/tests/unit/test_rpc_handlers_paths.py b/services/storage/tests/unit/test_rpc_handlers_paths.py index 8ea54e4c614d..689eb9ba01f3 100644 --- a/services/storage/tests/unit/test_rpc_handlers_paths.py +++ b/services/storage/tests/unit/test_rpc_handlers_paths.py @@ -22,7 +22,6 @@ from models_library.api_schemas_storage import STORAGE_RPC_NAMESPACE from models_library.products import ProductName from models_library.projects_nodes_io import LocationID, NodeID, SimcoreS3FileID -from models_library.rabbitmq_basic_types import RPCMethodName from models_library.users import UserID from pydantic import ByteSize, TypeAdapter from pytest_simcore.helpers.storage_utils import FileIDDict, ProjectWithFilesParams @@ -74,7 +73,7 @@ async def _assert_compute_path_size( path: Path, expected_total_size: int, ) -> ByteSize: - async_job, async_job_name = await compute_path_size( + async_job, _ = await compute_path_size( storage_rpc_client, location_id=location_id, path=path, @@ -82,11 +81,12 @@ async def _assert_compute_path_size( user_id=user_id, product_name=product_name, 
owner="pytest_client_name" ), user_id=user_id, + product_name=product_name, ) async for job_composed_result in wait_and_get_result( storage_rpc_client, rpc_namespace=STORAGE_RPC_NAMESPACE, - method_name=RPCMethodName(compute_path_size.__name__), + method_name=compute_path_size.__name__, job_id=async_job.job_id, owner_metadata=TestOwnerMetadata( user_id=user_id, product_name=product_name, owner="pytest_client_name" @@ -112,7 +112,7 @@ async def _assert_delete_paths( *, paths: set[Path], ) -> None: - async_job, async_job_name = await delete_paths( + async_job, _ = await delete_paths( storage_rpc_client, location_id=location_id, paths=paths, @@ -120,11 +120,12 @@ async def _assert_delete_paths( user_id=user_id, product_name=product_name, owner="pytest_client_name" ), user_id=user_id, + product_name=product_name, ) async for job_composed_result in wait_and_get_result( storage_rpc_client, rpc_namespace=STORAGE_RPC_NAMESPACE, - method_name=RPCMethodName(compute_path_size.__name__), + method_name=delete_paths.__name__, job_id=async_job.job_id, owner_metadata=TestOwnerMetadata( user_id=user_id, product_name=product_name, owner="pytest_client_name" @@ -289,9 +290,9 @@ async def test_path_compute_size_inexistent_path( storage_rabbitmq_rpc_client, location_id, user_id, + product_name, path=Path(faker.file_path(absolute=False)), expected_total_size=0, - product_name=product_name, ) From d70aa0d0835b39aeeff49665080f04aabad7a0fe Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 18 Dec 2025 15:34:33 +0100 Subject: [PATCH 14/35] fix test --- services/storage/tests/unit/test_rpc_handlers_paths.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/storage/tests/unit/test_rpc_handlers_paths.py b/services/storage/tests/unit/test_rpc_handlers_paths.py index 689eb9ba01f3..ce1e581db7a9 100644 --- a/services/storage/tests/unit/test_rpc_handlers_paths.py +++ b/services/storage/tests/unit/test_rpc_handlers_paths.py @@ -125,7 +125,7 @@ async def _assert_delete_paths( async for job_composed_result in wait_and_get_result( storage_rpc_client, rpc_namespace=STORAGE_RPC_NAMESPACE, - method_name=delete_paths.__name__, + method_name=compute_path_size.__name__, job_id=async_job.job_id, owner_metadata=TestOwnerMetadata( user_id=user_id, product_name=product_name, owner="pytest_client_name" From 1a09d66789ccc949742e0b913477892f62e74aa1 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 18 Dec 2025 15:37:32 +0100 Subject: [PATCH 15/35] add product_name param --- .../src/simcore_service_storage/api/_worker_tasks/_paths.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py index 1b7a05e2dca7..650b990569a5 100644 --- a/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py @@ -39,6 +39,7 @@ async def delete_paths( task: Task, task_key: TaskKey, user_id: UserID, + product_name: ProductName, location_id: LocationID, paths: set[Path], ) -> None: @@ -46,7 +47,7 @@ async def delete_paths( with log_context( _logger, logging.INFO, - msg=f"delete {paths=} in {location_id=} for {user_id=}", + msg=f"delete {paths=} in {location_id=} for {user_id=} {product_name=}", ): dsm = get_dsm_provider(get_app_server(task.app).app).get(location_id) files_ids: set[StorageFileID] = { From 790379ec8e3562e6ffe2b11166f1e12889e3e79e Mon Sep 17 00:00:00 2001 
From: Giancarlo Romeo Date: Thu, 18 Dec 2025 15:42:00 +0100 Subject: [PATCH 16/35] add product_name param --- .../web/server/src/simcore_service_webserver/storage/_rest.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/services/web/server/src/simcore_service_webserver/storage/_rest.py b/services/web/server/src/simcore_service_webserver/storage/_rest.py index e25a88630dce..61302bb51b52 100644 --- a/services/web/server/src/simcore_service_webserver/storage/_rest.py +++ b/services/web/server/src/simcore_service_webserver/storage/_rest.py @@ -230,6 +230,7 @@ async def compute_path_size(request: web.Request) -> web.Response: ).model_dump() ), user_id=req_ctx.user_id, + product_name=req_ctx.product_name, ) return _create_data_response_from_async_job(request, async_job) @@ -258,6 +259,7 @@ async def batch_delete_paths(request: web.Request): ).model_dump() ), user_id=req_ctx.user_id, + product_name=req_ctx.product_name, ) return _create_data_response_from_async_job(request, async_job) From c3fe800cd5aeeda1c04c36b5295ab165111841ff Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 18 Dec 2025 16:22:31 +0100 Subject: [PATCH 17/35] add product_name query param --- .../storage/tests/unit/test_rpc_handlers_simcore_s3.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py b/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py index 1ca1c5d3c729..7e2666ffc532 100644 --- a/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py +++ b/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py @@ -428,7 +428,7 @@ async def _create_and_delete_folders_from_project( initialized_app, "delete_folders_of_project", folder_id=project_id, - ).with_query(user_id=f"{user_id}") + ).with_query(user_id=f"{user_id}", product_name=f"{product_name}") resp = await client.delete(f"{url}") assert_status(resp, status.HTTP_204_NO_CONTENT, None) @@ -439,7 +439,11 @@ async def _create_and_delete_folders_from_project( initialized_app, "list_files_metadata", location_id=f"{SimcoreS3DataManager.get_location_id()}", - ).with_query(user_id=f"{user_id}", uuid_filter=f"{project_id}") + ).with_query( + user_id=f"{user_id}", + product_name=f"{product_name}", + uuid_filter=f"{project_id}", + ) resp = await client.get(f"{url}") data, error = assert_status(resp, status.HTTP_200_OK, list[FileMetaDataGet]) assert not error From cbef0cf5d38255e6ee0942a69e0772be3c8e3c13 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 18 Dec 2025 16:29:03 +0100 Subject: [PATCH 18/35] add product_name query param --- services/storage/tests/unit/test_handlers_datasets.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/services/storage/tests/unit/test_handlers_datasets.py b/services/storage/tests/unit/test_handlers_datasets.py index 5808a63f1f1b..df7f4289ec45 100644 --- a/services/storage/tests/unit/test_handlers_datasets.py +++ b/services/storage/tests/unit/test_handlers_datasets.py @@ -15,6 +15,7 @@ DatasetMetaDataGet, FileMetaDataGet, ) +from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import LocationID, SimcoreS3FileID from models_library.users import UserID @@ -71,6 +72,7 @@ async def test_list_dataset_files_metadata( initialized_app: FastAPI, client: AsyncClient, user_id: UserID, + product_name: ProductName, project_id: ProjectID, location_id: LocationID, file_size: ByteSize, @@ -85,7 +87,7 @@ async def test_list_dataset_files_metadata( 
"list_dataset_files_metadata", location_id=location_id, dataset_id=project_id, - ).with_query(user_id=user_id) + ).with_query(user_id=user_id, product_name=f"{product_name}") response = await client.get(f"{url}") list_fmds, error = assert_status( @@ -122,7 +124,7 @@ async def test_list_datasets_metadata( ).with_query(user_id=user_id) response = await client.get(f"{url}") - list_datasets, error = assert_status( + list_datasets, _ = assert_status( response, status.HTTP_200_OK, list[DatasetMetaDataGet] ) assert response.status_code == status.HTTP_200_OK @@ -143,6 +145,7 @@ async def test_ensure_expand_dirs_defaults_true( initialized_app: FastAPI, client: AsyncClient, user_id: UserID, + product_name: ProductName, project_id: ProjectID, location_id: LocationID, ): @@ -157,7 +160,7 @@ async def test_ensure_expand_dirs_defaults_true( "list_dataset_files_metadata", location_id=location_id, dataset_id=project_id, - ).with_query(user_id=user_id) + ).with_query(user_id=user_id, product_name=product_name) await client.get(f"{url}") From dd26f73ce454918b46d9328f85ca5b18e91abfcc Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 18 Dec 2025 21:45:57 +0100 Subject: [PATCH 19/35] add product fake factory --- .../storage_models.py | 2 + .../pytest_simcore/helpers/faker_factories.py | 2 +- .../simcore_storage_data_models.py | 44 +++++++++++++++++-- services/storage/tests/conftest.py | 5 --- .../unit/test_handlers_files_metadata.py | 11 +++-- 5 files changed, 50 insertions(+), 14 deletions(-) diff --git a/packages/postgres-database/src/simcore_postgres_database/storage_models.py b/packages/postgres-database/src/simcore_postgres_database/storage_models.py index 639a87359dee..6a74e92af0ee 100644 --- a/packages/postgres-database/src/simcore_postgres_database/storage_models.py +++ b/packages/postgres-database/src/simcore_postgres_database/storage_models.py @@ -7,6 +7,7 @@ from .models.base import metadata from .models.file_meta_data import file_meta_data from .models.groups import groups, user_to_groups +from .models.products import products from .models.projects import projects from .models.tokens import tokens from .models.users import users @@ -15,6 +16,7 @@ "tokens", "file_meta_data", "metadata", + "products", "projects", "users", "groups", diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py b/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py index 3e80d3269aa6..fc7631e92546 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py @@ -184,7 +184,7 @@ def random_project(fake: Faker = DEFAULT_FAKER, **overrides) -> dict[str, Any]: "access_rights": {}, "workbench": {}, "published": False, - "product_name": "osparc", + "product_name": fake.word(), } icon = fake.random_element([random_icon_url(fake), None]) # nullable diff --git a/packages/pytest-simcore/src/pytest_simcore/simcore_storage_data_models.py b/packages/pytest-simcore/src/pytest_simcore/simcore_storage_data_models.py index a41d4876612d..69d179477c8d 100644 --- a/packages/pytest-simcore/src/pytest_simcore/simcore_storage_data_models.py +++ b/packages/pytest-simcore/src/pytest_simcore/simcore_storage_data_models.py @@ -9,16 +9,17 @@ import pytest import sqlalchemy as sa from faker import Faker +from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.users import UserID from pydantic import 
TypeAdapter from simcore_postgres_database.models.project_to_groups import project_to_groups -from simcore_postgres_database.storage_models import projects, users +from simcore_postgres_database.storage_models import products, projects, users from sqlalchemy.dialects.postgresql import insert as pg_insert from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine -from .helpers.faker_factories import DEFAULT_FAKER, random_project +from .helpers.faker_factories import DEFAULT_FAKER, random_product, random_project from .helpers.postgres_users import insert_and_get_user_and_secrets_lifespan @@ -51,14 +52,49 @@ async def other_user_id(sqlalchemy_async_engine: AsyncEngine) -> AsyncIterator[U yield new_user_id +@pytest.fixture +async def create_product( + sqlalchemy_async_engine: AsyncEngine, +) -> AsyncIterator[Callable[..., Awaitable[dict[str, Any]]]]: + created_product_names = [] + + async def _creator(**kwargs) -> dict[str, Any]: + product_config = {} + product_config.update(kwargs) + async with sqlalchemy_async_engine.begin() as conn: + result = await conn.execute( + products.insert() + .values(**random_product(**product_config)) + .returning(sa.literal_column("*")) + ) + row = result.one() + created_product_names.append(row.name) + return dict(row._asdict()) + + yield _creator + + async with sqlalchemy_async_engine.begin() as conn: + await conn.execute( + products.delete().where(products.c.name.in_(created_product_names)) + ) + + +@pytest.fixture +async def product_name( + create_product: Callable[..., Awaitable[dict[str, Any]]], +) -> ProductName: + product = await create_product() + return ProductName(product["name"]) + + @pytest.fixture async def create_project( - user_id: UserID, sqlalchemy_async_engine: AsyncEngine + user_id: UserID, product_name: ProductName, sqlalchemy_async_engine: AsyncEngine ) -> AsyncIterator[Callable[..., Awaitable[dict[str, Any]]]]: created_project_uuids = [] async def _creator(**kwargs) -> dict[str, Any]: - prj_config = {"prj_owner": user_id} + prj_config = {"prj_owner": user_id, "product_name": product_name} prj_config.update(kwargs) async with sqlalchemy_async_engine.begin() as conn: result = await conn.execute( diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index 6a1a58a87745..eed643dcd1d1 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -1073,11 +1073,6 @@ async def storage_rabbitmq_rpc_client( return rpc_client -@pytest.fixture -def product_name(faker: Faker) -> str: - return faker.word() - - @pytest.fixture def set_log_levels_for_noisy_libraries() -> None: # Reduce the log level for 'werkzeug' diff --git a/services/storage/tests/unit/test_handlers_files_metadata.py b/services/storage/tests/unit/test_handlers_files_metadata.py index dd8bd4a2728d..9291562ffc0e 100644 --- a/services/storage/tests/unit/test_handlers_files_metadata.py +++ b/services/storage/tests/unit/test_handlers_files_metadata.py @@ -16,6 +16,7 @@ FileMetaDataGet, SimcoreS3FileID, ) +from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import LocationID from models_library.users import UserID @@ -26,7 +27,7 @@ from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager from yarl import URL -pytest_simcore_core_services_selection = ["postgres"] +pytest_simcore_core_services_selection = ["postgres", "rabbit"] pytest_simcore_ops_services_selection = ["adminer"] @@ -54,6 +55,7 @@ async def test_list_files_metadata( 
client: httpx.AsyncClient, user_id: UserID, other_user_id: UserID, + product_name: ProductName, location_id: LocationID, project_id: ProjectID, faker: Faker, @@ -63,7 +65,7 @@ async def test_list_files_metadata( .with_path( initialized_app.url_path_for("list_files_metadata", location_id=location_id) ) - .with_query(user_id=f"{user_id}") + .with_query(user_id=user_id, product_name=product_name) ) # this should return an empty list @@ -179,6 +181,7 @@ async def test_get_file_metadata( initialized_app: FastAPI, client: httpx.AsyncClient, user_id: UserID, + product_name: ProductName, location_id: LocationID, project_id: ProjectID, simcore_file_id: SimcoreS3FileID, @@ -190,7 +193,7 @@ async def test_get_file_metadata( "get_file_metadata", location_id=f"{location_id}", file_id=simcore_file_id, - ).with_query(user_id=user_id) + ).with_query(user_id=user_id, product_name=product_name) # this should return an empty list response = await client.get(f"{url}") @@ -214,7 +217,7 @@ async def test_get_file_metadata( "get_file_metadata", location_id=f"{location_id}", file_id=selected_file_uuid, - ).with_query(user_id=user_id) + ).with_query(user_id=user_id, product_name=product_name) response = await client.get(f"{url}") fmd, error = assert_status(response, status.HTTP_200_OK, FileMetaDataGet) From c78d1f48f33597946c27e848279b26b199595703 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 18 Dec 2025 21:58:22 +0100 Subject: [PATCH 20/35] set default product --- .../src/pytest_simcore/helpers/faker_factories.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py b/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py index fc7631e92546..3e80d3269aa6 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py @@ -184,7 +184,7 @@ def random_project(fake: Faker = DEFAULT_FAKER, **overrides) -> dict[str, Any]: "access_rights": {}, "workbench": {}, "published": False, - "product_name": fake.word(), + "product_name": "osparc", } icon = fake.random_element([random_icon_url(fake), None]) # nullable From 6e12492dd0c2e31ee37641d6948d315190819d92 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 18 Dec 2025 22:07:05 +0100 Subject: [PATCH 21/35] fix test --- services/storage/tests/unit/test_handlers_datcore.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/storage/tests/unit/test_handlers_datcore.py b/services/storage/tests/unit/test_handlers_datcore.py index a71626100317..047e6010c583 100644 --- a/services/storage/tests/unit/test_handlers_datcore.py +++ b/services/storage/tests/unit/test_handlers_datcore.py @@ -1,6 +1,7 @@ import httpx import pytest from fastapi import FastAPI, status +from models_library.products import ProductName from models_library.projects_nodes_io import LocationID from models_library.users import UserID from pytest_simcore.helpers.fastapi import url_from_operation_id @@ -34,12 +35,11 @@ async def test_entrypoint_without_api_tokens_return_401( location_id: LocationID, entrypoint: str, user_id: UserID, + product_name: ProductName, ): url = url_from_operation_id( client, initialized_app, entrypoint, location_id=f"{location_id}" - ).with_query( - user_id=user_id, - ) + ).with_query(user_id=user_id, product_name=product_name) response = await client.get(f"{url}") assert_status( response, From 8b6fbea2a5d89f39ef21a4f9137aa639ace98f55 Mon Sep 17 00:00:00 
2001 From: Giancarlo Romeo Date: Thu, 18 Dec 2025 22:32:00 +0100 Subject: [PATCH 22/35] add product_name query param --- services/storage/tests/conftest.py | 2 +- .../tests/unit/test_handlers_datasets.py | 6 ++++-- .../storage/tests/unit/test_handlers_files.py | 11 +++++----- .../unit/test_handlers_files_metadata.py | 3 ++- .../projects/_controller/nodes_rest.py | 10 ++++++--- .../projects/_nodes_service.py | 21 ++++++++++++------- .../simcore_service_webserver/storage/api.py | 7 +++++-- 7 files changed, 39 insertions(+), 21 deletions(-) diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index eed643dcd1d1..0de6d8d4a747 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -367,7 +367,7 @@ async def _link_creator( "delete_file", location_id=f"{loc_id}", file_id=file_id, - ).with_query(user_id=u_id) + ).with_query(user_id=u_id, product_name=product_name) clean_tasks.append(client.delete(f"{url}")) await asyncio.gather(*clean_tasks) diff --git a/services/storage/tests/unit/test_handlers_datasets.py b/services/storage/tests/unit/test_handlers_datasets.py index df7f4289ec45..5eaf9c5de168 100644 --- a/services/storage/tests/unit/test_handlers_datasets.py +++ b/services/storage/tests/unit/test_handlers_datasets.py @@ -38,6 +38,7 @@ async def test_list_dataset_files_metadata_with_no_files_returns_empty_array( initialized_app: FastAPI, client: AsyncClient, user_id: UserID, + product_name: ProductName, project_id: ProjectID, location_id: LocationID, fake_datcore_tokens: tuple[str, str], @@ -48,7 +49,7 @@ async def test_list_dataset_files_metadata_with_no_files_returns_empty_array( "list_dataset_files_metadata", location_id=location_id, dataset_id=project_id, - ).with_query(user_id=user_id) + ).with_query(user_id=user_id, product_name=product_name) response = await client.get(f"{url}") data, error = assert_status(response, status.HTTP_200_OK, list[FileMetaDataGet]) @@ -113,6 +114,7 @@ async def test_list_datasets_metadata( initialized_app: FastAPI, client: AsyncClient, user_id: UserID, + product_name: ProductName, location_id: LocationID, project_id: ProjectID, ): @@ -121,7 +123,7 @@ async def test_list_datasets_metadata( initialized_app, "list_datasets_metadata", location_id=location_id, - ).with_query(user_id=user_id) + ).with_query(user_id=user_id, product_name=product_name) response = await client.get(f"{url}") list_datasets, _ = assert_status( diff --git a/services/storage/tests/unit/test_handlers_files.py b/services/storage/tests/unit/test_handlers_files.py index 80813cf8c998..2b9c0a5cac64 100644 --- a/services/storage/tests/unit/test_handlers_files.py +++ b/services/storage/tests/unit/test_handlers_files.py @@ -231,14 +231,14 @@ async def _link_creator(file_id: SimcoreS3FileID, **query_kwargs) -> PresignedLi # cleanup clean_tasks = [] - for u_id, loc_id, file_id in file_params: + for u_id, _, file_id in file_params: url = url_from_operation_id( client, initialized_app, "upload_file", location_id=f"{location_id}", file_id=file_id, - ).with_query(user_id=u_id) + ).with_query(user_id=u_id, product_name=product_name) clean_tasks.append(client.delete(f"{url}")) await asyncio.gather(*clean_tasks) @@ -1334,6 +1334,7 @@ async def test_ensure_expand_dirs_defaults_true( mocker: MockerFixture, client: httpx.AsyncClient, user_id: UserID, + product_name: ProductName, location_id: LocationID, ): mocked_object = mocker.patch( @@ -1346,7 +1347,7 @@ async def test_ensure_expand_dirs_defaults_true( initialized_app, "list_files_metadata", 
location_id=f"{location_id}", - ).with_query(user_id=user_id) + ).with_query(user_id=user_id, product_name=product_name) await client.get(f"{get_url}") assert len(mocked_object.call_args_list) == 1 @@ -1426,7 +1427,7 @@ async def test_upload_file_is_directory_and_remove_content( "delete_file", location_id=f"{location_id}", file_id="/".join(list_of_files[0].file_id.split("/")[:2]) + "/does_not_exist", - ).with_query(user_id=user_id) + ).with_query(user_id=user_id, product_name=product_name) response = await client.delete(f"{delete_url}") _, error = assert_status(response, status.HTTP_204_NO_CONTENT, None) assert error is None @@ -1445,7 +1446,7 @@ async def test_upload_file_is_directory_and_remove_content( "delete_file", location_id=f"{location_id}", file_id=list_of_files[0].file_id, - ).with_query(user_id=user_id) + ).with_query(user_id=user_id, product_name=product_name) response = await client.delete(f"{delete_url}") _, error = assert_status(response, status.HTTP_204_NO_CONTENT, None) assert error is None diff --git a/services/storage/tests/unit/test_handlers_files_metadata.py b/services/storage/tests/unit/test_handlers_files_metadata.py index 9291562ffc0e..95d78d676322 100644 --- a/services/storage/tests/unit/test_handlers_files_metadata.py +++ b/services/storage/tests/unit/test_handlers_files_metadata.py @@ -150,6 +150,7 @@ async def test_get_file_metadata_is_legacy_services_compatible( initialized_app: FastAPI, client: httpx.AsyncClient, user_id: UserID, + product_name: ProductName, location_id: LocationID, simcore_file_id: SimcoreS3FileID, ): @@ -162,7 +163,7 @@ async def test_get_file_metadata_is_legacy_services_compatible( file_id=simcore_file_id, ) ) - .with_query(user_id=f"{user_id}") + .with_query(user_id=user_id, product_name=product_name) ) # this should return an empty list diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/nodes_rest.py b/services/web/server/src/simcore_service_webserver/projects/_controller/nodes_rest.py index 18bc49d9a553..0af069259016 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_controller/nodes_rest.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/nodes_rest.py @@ -34,7 +34,7 @@ from models_library.services_resources import ServiceResourcesDict from models_library.services_types import ServiceKey, ServiceVersion from models_library.utils.fastapi_encoders import jsonable_encoder -from pydantic import BaseModel, Field +from pydantic import BaseModel, Field, TypeAdapter from servicelib.aiohttp import status from servicelib.aiohttp.long_running_tasks.server import start_long_running_task from servicelib.aiohttp.requests_validation import ( @@ -346,7 +346,7 @@ async def _stop_dynamic_service_task( }, ) ) - # ANE: in case there is an error reply as not found + # ANE: in case there is an error reply as not found # spellchecker:disable-line return create_error_response( error=ErrorGet( message=user_error_msg, @@ -718,6 +718,7 @@ async def list_project_nodes_previews(request: web.Request) -> web.Response: screenshots = await get_node_screenshots( app=request.app, user_id=req_ctx.user_id, + product_name=req_ctx.product_name, project_id=path_params.project_id, node_id=NodeID(node_id), node=node, @@ -754,7 +755,9 @@ async def get_project_node_preview(request: web.Request) -> web.Response: project = Project.model_validate(project_data) - node = project.workbench.get(NodeIDStr(path_params.node_id)) + node = project.workbench.get( + 
TypeAdapter(NodeIDStr).validate_python(path_params.node_id) + ) if node is None: raise NodeNotFoundError( project_uuid=f"{path_params.project_id}", @@ -767,6 +770,7 @@ async def get_project_node_preview(request: web.Request) -> web.Response: screenshots=await get_node_screenshots( app=request.app, user_id=req_ctx.user_id, + product_name=req_ctx.product_name, project_id=path_params.project_id, node_id=path_params.node_id, node=node, diff --git a/services/web/server/src/simcore_service_webserver/projects/_nodes_service.py b/services/web/server/src/simcore_service_webserver/projects/_nodes_service.py index 5354b496ca5f..6eb642992859 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_nodes_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_nodes_service.py @@ -8,7 +8,7 @@ from aiohttp import web from aiohttp.client import ClientError from models_library.api_schemas_storage.storage_schemas import FileMetaDataGet -from models_library.basic_types import KeyIDStr +from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes import Node from models_library.projects_nodes_io import NodeID, SimCoreFileLink @@ -184,11 +184,15 @@ def _get_files_with_thumbnails( async def __get_link( - app: web.Application, user_id: UserID, file_meta_data: FileMetaDataGet + app: web.Application, + user_id: UserID, + product_name: ProductName, + file_meta_data: FileMetaDataGet, ) -> tuple[str, HttpUrl]: return __get_search_key(file_meta_data), await get_download_link( app, user_id, + product_name, SimCoreFileLink.model_validate({"store": "0", "path": file_meta_data.file_id}), ) @@ -196,6 +200,7 @@ async def __get_link( async def _get_node_screenshots( app: web.Application, user_id: UserID, + product_name: ProductName, files_with_thumbnails: list[_FileWithThumbnail], ) -> list[NodeScreenshot]: """resolves links concurrently before returning all the NodeScreenshots""" @@ -207,7 +212,7 @@ async def _get_node_screenshots( search_map[__get_search_key(entry.thumbnail)] = entry.thumbnail resolved_links: list[tuple[str, HttpUrl]] = await logged_gather( - *[__get_link(app, user_id, x) for x in search_map.values()], + *[__get_link(app, user_id, product_name, x) for x in search_map.values()], max_concurrency=10, ) @@ -226,6 +231,7 @@ async def _get_node_screenshots( async def get_node_screenshots( app: web.Application, user_id: UserID, + product_name: ProductName, project_id: ProjectID, node_id: NodeID, node: Node, @@ -244,9 +250,9 @@ async def get_node_screenshots( assert node.outputs is not None # nosec - filelink = SimCoreFileLink.model_validate(node.outputs[KeyIDStr("outFile")]) + filelink = SimCoreFileLink.model_validate(node.outputs["outFile"]) - file_url = await get_download_link(app, user_id, filelink) + file_url = await get_download_link(app, user_id, product_name, filelink) screenshots.append( NodeScreenshot( thumbnail_url=f"https://placehold.co/170x120?text={text}", # type: ignore[arg-type] @@ -273,8 +279,9 @@ async def get_node_screenshots( ) resolved_screenshots: list[NodeScreenshot] = await _get_node_screenshots( - app=app, - user_id=user_id, + app, + user_id, + product_name, files_with_thumbnails=_get_files_with_thumbnails(assets_files), ) screenshots.extend(resolved_screenshots) diff --git a/services/web/server/src/simcore_service_webserver/storage/api.py b/services/web/server/src/simcore_service_webserver/storage/api.py index 7c463ebee0ab..bf7ec19020b8 100644 --- 
a/services/web/server/src/simcore_service_webserver/storage/api.py +++ b/services/web/server/src/simcore_service_webserver/storage/api.py @@ -198,7 +198,10 @@ async def get_app_status(app: web.Application) -> dict[str, Any]: async def get_download_link( - app: web.Application, user_id: UserID, filelink: SimCoreFileLink + app: web.Application, + user_id: UserID, + product_name: ProductName, + filelink: SimCoreFileLink, ) -> HttpUrl: """ Raises: @@ -213,7 +216,7 @@ async def get_download_link( api_endpoint / f"locations/{filelink.store}/files" / urllib.parse.quote(filelink.path, safe="") - ).with_query(user_id=user_id) + ).with_query(user_id=user_id, product_name=product_name) async with session.get(f"{url}") as response: response.raise_for_status() From d4496cbb9926596deb5a8be85a121d26b7e61e00 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 18 Dec 2025 23:17:14 +0100 Subject: [PATCH 23/35] fix test --- .../src/pytest_simcore/helpers/storage_rpc_server.py | 4 +++- .../servicelib/rabbitmq/rpc_interfaces/storage/simcore_s3.py | 3 +++ .../src/simcore_service_api_server/services_rpc/storage.py | 1 + .../src/simcore_service_storage/api/rpc/_simcore_s3.py | 3 +++ services/storage/tests/unit/test_rpc_handlers_simcore_s3.py | 1 + .../web/server/src/simcore_service_webserver/storage/_rest.py | 1 + 6 files changed, 12 insertions(+), 1 deletion(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/storage_rpc_server.py b/packages/pytest-simcore/src/pytest_simcore/helpers/storage_rpc_server.py index 72dc62ca438f..457c2d1d1c04 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/storage_rpc_server.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/storage_rpc_server.py @@ -12,6 +12,7 @@ AsyncJobGet, ) from models_library.api_schemas_webserver.storage import PathToExport +from models_library.products import ProductName from models_library.users import UserID from pydantic import TypeAdapter, validate_call from pytest_mock import MockType @@ -29,7 +30,8 @@ async def start_export_data( paths_to_export: list[PathToExport], export_as: Literal["path", "download_link"], owner_metadata: OwnerMetadata, - user_id: UserID + user_id: UserID, + product_name: ProductName, ) -> tuple[AsyncJobGet, OwnerMetadata]: assert rabbitmq_rpc_client assert owner_metadata diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/storage/simcore_s3.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/storage/simcore_s3.py index 35f5ee9118d2..37e5c33d0d72 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/storage/simcore_s3.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/storage/simcore_s3.py @@ -6,6 +6,7 @@ from models_library.api_schemas_storage import STORAGE_RPC_NAMESPACE from models_library.api_schemas_storage.storage_schemas import FoldersBody from models_library.api_schemas_webserver.storage import PathToExport +from models_library.products import ProductName from models_library.rabbitmq_basic_types import RPCMethodName from models_library.users import UserID from pydantic import TypeAdapter @@ -42,6 +43,7 @@ async def start_export_data( export_as: Literal["path", "download_link"], owner_metadata: OwnerMetadata, user_id: UserID, + product_name: ProductName, ) -> tuple[AsyncJobGet, OwnerMetadata]: async_job_rpc_get = await submit( rabbitmq_rpc_client, @@ -51,5 +53,6 @@ async def start_export_data( paths_to_export=paths_to_export, export_as=export_as, user_id=user_id, + product_name=product_name, ) 
return async_job_rpc_get, owner_metadata diff --git a/services/api-server/src/simcore_service_api_server/services_rpc/storage.py b/services/api-server/src/simcore_service_api_server/services_rpc/storage.py index 94a82fd7c369..a803ab65e15a 100644 --- a/services/api-server/src/simcore_service_api_server/services_rpc/storage.py +++ b/services/api-server/src/simcore_service_api_server/services_rpc/storage.py @@ -40,5 +40,6 @@ async def start_data_export( ).model_dump() ), user_id=self._user_id, + product_name=self._product_name, ) return async_job_get diff --git a/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py b/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py index 885fce5cbd81..818db8c98c7a 100644 --- a/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py +++ b/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py @@ -5,6 +5,7 @@ ) from models_library.api_schemas_storage.storage_schemas import FoldersBody from models_library.api_schemas_webserver.storage import PathToExport +from models_library.products import ProductName from models_library.users import UserID from servicelib.celery.models import ( ExecutionMetadata, @@ -48,6 +49,7 @@ async def start_export_data( task_manager: TaskManager, owner_metadata: OwnerMetadata, user_id: UserID, + product_name: ProductName, paths_to_export: list[PathToExport], export_as: Literal["path", "download_link"], ) -> AsyncJobGet: @@ -65,6 +67,7 @@ async def start_export_data( ), owner_metadata=owner_metadata, user_id=user_id, + product_name=product_name, paths_to_export=paths_to_export, ) return AsyncJobGet(job_id=task_uuid, job_name=task_name) diff --git a/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py b/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py index 7e2666ffc532..8844848b2391 100644 --- a/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py +++ b/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py @@ -551,6 +551,7 @@ async def _request_start_export_data( owner="PYTEST_CLIENT_NAME", ), user_id=user_id, + product_name=product_name, ) async for async_job_result in wait_and_get_result( diff --git a/services/web/server/src/simcore_service_webserver/storage/_rest.py b/services/web/server/src/simcore_service_webserver/storage/_rest.py index 61302bb51b52..713bbeb010df 100644 --- a/services/web/server/src/simcore_service_webserver/storage/_rest.py +++ b/services/web/server/src/simcore_service_webserver/storage/_rest.py @@ -531,6 +531,7 @@ class _PathParams(BaseModel): ).model_dump() ), user_id=_req_ctx.user_id, + product_name=_req_ctx.product_name, ) _job_id = f"{async_job_rpc_get.job_id}" return create_data_response( From 5c4f2b07ac176e8e9f83af7f16e72d0baad2d54e Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 18 Dec 2025 23:22:41 +0100 Subject: [PATCH 24/35] fix type --- .../projects/_controller/nodes_rest.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/nodes_rest.py b/services/web/server/src/simcore_service_webserver/projects/_controller/nodes_rest.py index 0af069259016..ec8fd2481d1d 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_controller/nodes_rest.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/nodes_rest.py @@ -28,13 +28,13 @@ from models_library.basic_types import IDStr from models_library.groups import EVERYONE_GROUP_ID, Group, GroupID, GroupType from models_library.projects 
import Project, ProjectID -from models_library.projects_nodes_io import NodeID, NodeIDStr +from models_library.projects_nodes_io import NodeID from models_library.rest_error import ErrorGet from models_library.services import ServiceKeyVersion from models_library.services_resources import ServiceResourcesDict from models_library.services_types import ServiceKey, ServiceVersion from models_library.utils.fastapi_encoders import jsonable_encoder -from pydantic import BaseModel, Field, TypeAdapter +from pydantic import BaseModel, Field from servicelib.aiohttp import status from servicelib.aiohttp.long_running_tasks.server import start_long_running_task from servicelib.aiohttp.requests_validation import ( @@ -755,9 +755,7 @@ async def get_project_node_preview(request: web.Request) -> web.Response: project = Project.model_validate(project_data) - node = project.workbench.get( - TypeAdapter(NodeIDStr).validate_python(path_params.node_id) - ) + node = project.workbench.get(f"{path_params.node_id}") if node is None: raise NodeNotFoundError( project_uuid=f"{path_params.project_id}", From d7e1e20bf1b432279f1fe6afb4054929ce6cb0c5 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 19 Dec 2025 08:25:38 +0100 Subject: [PATCH 25/35] add product_name query param --- services/storage/tests/unit/test_handlers_locations.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/services/storage/tests/unit/test_handlers_locations.py b/services/storage/tests/unit/test_handlers_locations.py index 4aae75e69de7..bf91ff22416c 100644 --- a/services/storage/tests/unit/test_handlers_locations.py +++ b/services/storage/tests/unit/test_handlers_locations.py @@ -8,6 +8,7 @@ import httpx from fastapi import FastAPI, status from models_library.api_schemas_storage.storage_schemas import FileLocation +from models_library.products import ProductName from models_library.users import UserID from pytest_simcore.helpers.fastapi import url_from_operation_id from pytest_simcore.helpers.httpx_assert_checks import assert_status @@ -22,11 +23,12 @@ async def test_locations( initialized_app: FastAPI, client: httpx.AsyncClient, user_id: UserID, + product_name: ProductName, fake_datcore_tokens: tuple[str, str], ): url = url_from_operation_id( client, initialized_app, "list_storage_locations" - ).with_query(user_id=user_id) + ).with_query(user_id=user_id, product_name=product_name) response = await client.get(f"{url}") data, _ = assert_status(response, status.HTTP_200_OK, list[FileLocation]) assert data @@ -45,10 +47,11 @@ async def test_locations_without_tokens( initialized_app: FastAPI, client: httpx.AsyncClient, user_id: UserID, + product_name: ProductName, ): url = url_from_operation_id( client, initialized_app, "list_storage_locations" - ).with_query(user_id=user_id) + ).with_query(user_id=user_id, product_name=product_name) response = await client.get(f"{url}") data, _ = assert_status(response, status.HTTP_200_OK, list[FileLocation]) assert data From c2b9011e26343a23383d8274a7da6e6d7cfadb56 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 19 Dec 2025 08:59:19 +0100 Subject: [PATCH 26/35] fix tests --- .../node_ports_common/storage_client.py | 25 +++++++++--- packages/simcore-sdk/tests/unit/conftest.py | 5 +++ .../tests/unit/test_storage_client.py | 39 +++++++++++++++---- 3 files changed, 56 insertions(+), 13 deletions(-) diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/storage_client.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/storage_client.py index 
8b0f89a21974..068c106e04d6 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/storage_client.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/storage_client.py @@ -19,6 +19,7 @@ ) from models_library.basic_types import SHA256Str from models_library.generics import Envelope +from models_library.products import ProductName from models_library.projects_nodes_io import LocationID, StorageFileID from models_library.users import UserID from pydantic import ByteSize @@ -136,14 +137,14 @@ async def retry_request( @handle_client_exception async def list_storage_locations( - *, session: ClientSession, user_id: UserID + *, session: ClientSession, user_id: UserID, product_name: ProductName ) -> FileLocationArray: async with retry_request( session, "GET", f"{get_base_url()}/locations", expected_status=status.HTTP_200_OK, - params={"user_id": f"{user_id}"}, + params={"user_id": f"{user_id}", "product_name": f"{product_name}"}, ) as response: locations_enveloped = Envelope[FileLocationArray].model_validate( await response.json() @@ -161,6 +162,7 @@ async def get_download_file_link( file_id: StorageFileID, location_id: LocationID, user_id: UserID, + product_name: ProductName, link_type: LinkType, ) -> AnyUrl: """ @@ -172,7 +174,11 @@ async def get_download_file_link( "GET", f"{get_base_url()}/locations/{location_id}/files/{quote(file_id, safe='')}", expected_status=status.HTTP_200_OK, - params={"user_id": f"{user_id}", "link_type": link_type.value}, + params={ + "user_id": f"{user_id}", + "product_name": f"{product_name}", + "link_type": link_type.value, + }, ) as response: presigned_link_enveloped = Envelope[PresignedLink].model_validate( await response.json() @@ -232,13 +238,14 @@ async def get_file_metadata( file_id: StorageFileID, location_id: LocationID, user_id: UserID, + product_name: ProductName, ) -> FileMetaDataGet: async with retry_request( session, "GET", f"{get_base_url()}/locations/{location_id}/files/{quote(file_id, safe='')}/metadata", expected_status=status.HTTP_200_OK, - params={"user_id": f"{user_id}"}, + params={"user_id": f"{user_id}", "product_name": f"{product_name}"}, ) as response: payload = await response.json() if not payload.get("data"): @@ -255,6 +262,7 @@ async def list_file_metadata( *, session: ClientSession, user_id: UserID, + product_name: ProductName, location_id: LocationID, uuid_filter: str, ) -> list[FileMetaDataGet]: @@ -263,7 +271,11 @@ async def list_file_metadata( "GET", f"{get_base_url()}/locations/{location_id}/files/metadata", expected_status=status.HTTP_200_OK, - params={"user_id": f"{user_id}", "uuid_filter": uuid_filter}, + params={ + "user_id": f"{user_id}", + "product_name": f"{product_name}", + "uuid_filter": uuid_filter, + }, ) as resp: envelope = Envelope[list[FileMetaDataGet]].model_validate(await resp.json()) assert envelope.data is not None # nosec @@ -278,12 +290,13 @@ async def delete_file( file_id: StorageFileID, location_id: LocationID, user_id: UserID, + product_name: ProductName, ) -> None: async with retry_request( session, "DELETE", f"{get_base_url()}/locations/{location_id}/files/{quote(file_id, safe='')}", expected_status=status.HTTP_204_NO_CONTENT, - params={"user_id": f"{user_id}"}, + params={"user_id": f"{user_id}", "product_name": f"{product_name}"}, ): ... 
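With the signatures above, every node_ports_common storage helper now takes product_name alongside user_id. A hedged usage sketch (it assumes a reachable storage service and valid settings; the product value is illustrative only):

    import asyncio

    from aiohttp import ClientSession
    from simcore_sdk.node_ports_common.storage_client import list_storage_locations


    async def main() -> None:
        async with ClientSession() as session:
            locations = await list_storage_locations(
                session=session,
                user_id=1,
                product_name="osparc",  # now required next to user_id
            )
            print(locations)


    asyncio.run(main())
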
diff --git a/packages/simcore-sdk/tests/unit/conftest.py b/packages/simcore-sdk/tests/unit/conftest.py index 527e02d10038..d78242f15472 100644 --- a/packages/simcore-sdk/tests/unit/conftest.py +++ b/packages/simcore-sdk/tests/unit/conftest.py @@ -17,6 +17,11 @@ def user_id() -> int: return randint(1, 10000) +@pytest.fixture(scope="module") +def product_name() -> str: + return f"product-{uuid4()}" + + @pytest.fixture(scope="module") def project_id() -> str: return f"{uuid4()}" diff --git a/packages/simcore-sdk/tests/unit/test_storage_client.py b/packages/simcore-sdk/tests/unit/test_storage_client.py index feb61ed20429..acad64ea7e54 100644 --- a/packages/simcore-sdk/tests/unit/test_storage_client.py +++ b/packages/simcore-sdk/tests/unit/test_storage_client.py @@ -18,6 +18,7 @@ FileUploadSchema, LocationID, ) +from models_library.products import ProductName from models_library.projects_nodes_io import SimcoreS3FileID from models_library.users import UserID from pydantic import AnyUrl, ByteSize, TypeAdapter @@ -80,7 +81,9 @@ def mock_environment( @pytest.fixture() def file_id() -> SimcoreS3FileID: - return SimcoreS3FileID(f"{uuid4()}/{uuid4()}/some_fake_file_id") + return TypeAdapter(SimcoreS3FileID).validate_python( + f"{uuid4()}/{uuid4()}/some_fake_file_id" + ) @pytest.fixture() @@ -100,8 +103,11 @@ async def test_list_storage_locations( mock_postgres: EnvVarsDict, session: aiohttp.ClientSession, user_id: UserID, + product_name: ProductName, ): - result = await list_storage_locations(session=session, user_id=user_id) + result = await list_storage_locations( + session=session, user_id=user_id, product_name=product_name + ) assert isinstance(result, FileLocationArray) # type: ignore assert len(result) == 1 @@ -119,6 +125,7 @@ async def test_get_download_file_link( storage_v0_service_mock: AioResponsesMock, session: aiohttp.ClientSession, user_id: UserID, + product_name: ProductName, file_id: SimcoreS3FileID, location_id: LocationID, link_type: LinkType, @@ -129,6 +136,7 @@ async def test_get_download_file_link( file_id=file_id, location_id=location_id, user_id=user_id, + product_name=product_name, link_type=link_type, ) assert isinstance(link, AnyUrl) @@ -165,17 +173,22 @@ async def test_get_upload_file_links( assert file_upload_links.urls[0].scheme in expected_scheme -async def test_get_file_metada( +async def test_get_file_metadata( clear_caches: None, mock_environment: EnvVarsDict, storage_v0_service_mock: AioResponsesMock, session: aiohttp.ClientSession, user_id: UserID, + product_name: ProductName, file_id: SimcoreS3FileID, location_id: LocationID, ): file_metadata = await get_file_metadata( - session=session, file_id=file_id, location_id=location_id, user_id=user_id + session=session, + file_id=file_id, + location_id=location_id, + user_id=user_id, + product_name=product_name, ) assert file_metadata assert file_metadata == FileMetaDataGet.model_validate( @@ -211,12 +224,13 @@ def storage_v0_service_mock_get_file_meta_data_not_found( return aioresponses_mocker -async def test_get_file_metada_invalid_s3_path( +async def test_get_file_metadata_invalid_s3_path( clear_caches: None, mock_environment: EnvVarsDict, storage_v0_service_mock_get_file_meta_data_not_found: AioResponsesMock, session: aiohttp.ClientSession, user_id: UserID, + product_name: ProductName, file_id: SimcoreS3FileID, location_id: LocationID, ): @@ -226,6 +240,7 @@ async def test_get_file_metada_invalid_s3_path( file_id=file_id, location_id=location_id, user_id=user_id, + product_name=product_name, ) @@ -235,11 +250,16 @@ 
async def test_list_file_metadata( storage_v0_service_mock: AioResponsesMock, session: aiohttp.ClientSession, user_id: UserID, + product_name: ProductName, file_id: SimcoreS3FileID, location_id: LocationID, ): list_of_file_metadata = await list_file_metadata( - session=session, user_id=user_id, location_id=location_id, uuid_filter="" + session=session, + user_id=user_id, + product_name=product_name, + location_id=location_id, + uuid_filter="", ) assert list_of_file_metadata == [] @@ -250,11 +270,16 @@ async def test_delete_file( storage_v0_service_mock: AioResponsesMock, session: aiohttp.ClientSession, user_id: UserID, + product_name: ProductName, file_id: SimcoreS3FileID, location_id: LocationID, ): await delete_file( - session=session, file_id=file_id, location_id=location_id, user_id=user_id + session=session, + file_id=file_id, + location_id=location_id, + user_id=user_id, + product_name=product_name, ) From 74b159afb6906f547875cb0ac6924c90e44793f1 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 19 Dec 2025 10:33:15 +0100 Subject: [PATCH 27/35] fix simcore_sdk tests --- .../src/simcore_sdk/node_data/data_manager.py | 29 ++++++- .../node_ports_common/_filemanager_utils.py | 9 +- .../node_ports_common/dbmanager.py | 15 ++-- .../node_ports_common/exceptions.py | 8 +- .../node_ports_common/filemanager.py | 31 +++++-- .../src/simcore_sdk/node_ports_v2/port.py | 37 +++++++- .../simcore_sdk/node_ports_v2/port_utils.py | 38 +++++++-- .../simcore-sdk/tests/integration/conftest.py | 25 +++++- .../test_node_data_data_manager.py | 11 +++ .../test_node_ports_common_filemanager.py | 85 +++++++++++++++---- .../tests/unit/test_node_data_data_manager.py | 24 ++++-- 11 files changed, 261 insertions(+), 51 deletions(-) diff --git a/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py b/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py index fbb9b1980110..8ece80eaabc3 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py @@ -2,6 +2,7 @@ from pathlib import Path from tempfile import TemporaryDirectory +from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, StorageFileID from models_library.service_settings_labels import LegacyState @@ -35,6 +36,7 @@ def __get_s3_name(path: Path, *, is_archive: bool) -> str: async def _push_directory( user_id: UserID, + product_name: ProductName, project_id: ProjectID, node_uuid: NodeID, source_path: Path, @@ -50,6 +52,7 @@ async def _push_directory( ): await filemanager.upload_path( user_id=user_id, + product_name=product_name, store_id=SIMCORE_LOCATION, store_name=None, s3_object=s3_object, @@ -63,6 +66,7 @@ async def _push_directory( async def _pull_directory( user_id: UserID, + product_name: ProductName, project_id: ProjectID, node_uuid: NodeID, destination_path: Path, @@ -79,6 +83,7 @@ async def _pull_directory( ): await filemanager.download_path_from_s3( user_id=user_id, + product_name=product_name, store_id=SIMCORE_LOCATION, store_name=None, s3_object=s3_object, @@ -91,6 +96,7 @@ async def _pull_directory( async def _pull_legacy_archive( user_id: UserID, + product_name: ProductName, project_id: ProjectID, node_uuid: NodeID, destination_path: Path, @@ -113,6 +119,7 @@ async def _pull_legacy_archive( _logger.info("pulling data from %s to %s...", s3_object, archive_file) downloaded_file = await filemanager.download_path_from_s3( user_id=user_id, + 
product_name=product_name, store_id=SIMCORE_LOCATION, store_name=None, s3_object=s3_object, @@ -141,6 +148,7 @@ async def _pull_legacy_archive( async def _state_metadata_entry_exists( user_id: UserID, + product_name: ProductName, project_id: ProjectID, node_uuid: NodeID, path: Path, @@ -156,6 +164,7 @@ async def _state_metadata_entry_exists( _logger.debug("Checking if s3_object='%s' is present", s3_object) return await filemanager.entry_exists( user_id=user_id, + product_name=product_name, store_id=SIMCORE_LOCATION, s3_object=s3_object, is_directory=not is_archive, @@ -174,16 +183,20 @@ async def _delete_legacy_archive( # NOTE: if service is opened by a person which the users shared it with, # they will not have the permission to delete the node # Removing it via it's owner allows to always have access to the delete operation. - owner_id = await DBManager( + owner, product_name = await DBManager( application_name=application_name - ).get_project_owner_user_id(project_id) + ).get_project_owner_and_product_name(project_id) await filemanager.delete_file( - user_id=owner_id, store_id=SIMCORE_LOCATION, s3_object=s3_object + user_id=owner, + product_name=product_name, + store_id=SIMCORE_LOCATION, + s3_object=s3_object, ) async def push( # pylint: disable=too-many-arguments user_id: UserID, + product_name: ProductName, project_id: ProjectID, node_uuid: NodeID, source_path: Path, @@ -199,6 +212,7 @@ async def push( # pylint: disable=too-many-arguments await _push_directory( user_id=user_id, + product_name=product_name, project_id=project_id, node_uuid=node_uuid, source_path=source_path, @@ -210,6 +224,7 @@ async def push( # pylint: disable=too-many-arguments archive_exists = await _state_metadata_entry_exists( user_id=user_id, + product_name=product_name, project_id=project_id, node_uuid=node_uuid, path=source_path, @@ -227,6 +242,7 @@ async def push( # pylint: disable=too-many-arguments if legacy_state: legacy_archive_exists = await _state_metadata_entry_exists( user_id=user_id, + product_name=product_name, project_id=project_id, node_uuid=node_uuid, path=legacy_state.old_state_path, @@ -246,6 +262,7 @@ async def push( # pylint: disable=too-many-arguments async def pull( user_id: UserID, + product_name: ProductName, project_id: ProjectID, node_uuid: NodeID, destination_path: Path, @@ -265,6 +282,7 @@ async def pull( ) legacy_state_exists = await _state_metadata_entry_exists( user_id=user_id, + product_name=product_name, project_id=project_id, node_uuid=node_uuid, path=legacy_state.old_state_path, @@ -279,6 +297,7 @@ async def pull( ): await _pull_legacy_archive( user_id=user_id, + product_name=product_name, project_id=project_id, node_uuid=node_uuid, destination_path=legacy_state.new_state_path, @@ -290,6 +309,7 @@ async def pull( state_archive_exists = await _state_metadata_entry_exists( user_id=user_id, + product_name=product_name, project_id=project_id, node_uuid=node_uuid, path=destination_path, @@ -299,6 +319,7 @@ async def pull( with log_context(_logger, logging.INFO, "restoring data from legacy archive"): await _pull_legacy_archive( user_id=user_id, + product_name=product_name, project_id=project_id, node_uuid=node_uuid, destination_path=destination_path, @@ -309,6 +330,7 @@ async def pull( state_directory_exists = await _state_metadata_entry_exists( user_id=user_id, + product_name=product_name, project_id=project_id, node_uuid=node_uuid, path=destination_path, @@ -317,6 +339,7 @@ async def pull( if state_directory_exists: await _pull_directory( user_id=user_id, + 
product_name=product_name, project_id=project_id, node_uuid=node_uuid, destination_path=destination_path, diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager_utils.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager_utils.py index 043b763764ad..18a2d28d80aa 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager_utils.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager_utils.py @@ -11,6 +11,7 @@ UploadedPart, ) from models_library.generics import Envelope +from models_library.products import ProductName from models_library.projects_nodes_io import LocationID, LocationName from models_library.users import UserID from models_library.utils.fastapi_encoders import jsonable_encoder @@ -31,10 +32,13 @@ async def _get_location_id_from_location_name( user_id: UserID, + product_name: ProductName, store: LocationName, session: ClientSession, ) -> LocationID: - resp = await storage_client.list_storage_locations(session=session, user_id=user_id) + resp = await storage_client.list_storage_locations( + session=session, user_id=user_id, product_name=product_name + ) for location in resp: if location.name == store: return cast(LocationID, location.id) # mypy wants it @@ -123,6 +127,7 @@ async def complete_upload( async def resolve_location_id( client_session: ClientSession, user_id: UserID, + product_name: ProductName, store_name: LocationName | None, store_id: LocationID | None, ) -> LocationID: @@ -132,7 +137,7 @@ async def resolve_location_id( if store_name is not None: store_id = await _get_location_id_from_location_name( - user_id, store_name, client_session + user_id, product_name, store_name, client_session ) assert store_id is not None # nosec return store_id diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/dbmanager.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/dbmanager.py index 21c0f0173b91..cd46404c19d6 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/dbmanager.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/dbmanager.py @@ -2,6 +2,7 @@ import sqlalchemy as sa from common_library.json_serialization import json_dumps, json_loads +from models_library.products import ProductName from models_library.projects import ProjectID from models_library.users import UserID from pydantic import TypeAdapter @@ -177,18 +178,22 @@ async def get_ports_configuration_from_node_uuid( _logger.debug("Found and converted to json") return node_json_config - async def get_project_owner_user_id(self, project_id: ProjectID) -> UserID: + async def get_project_owner_and_product_name( + self, project_id: ProjectID + ) -> tuple[UserID, ProductName]: async with ( DBContextManager( self._db_engine, application_name=self._application_name ) as engine, engine.connect() as connection, ): - prj_owner = await connection.scalar( - sa.select(projects.c.prj_owner).where( + row = await connection.scalar( + sa.select(projects.c.prj_owner, projects.c.product_name).where( projects.c.uuid == f"{project_id}" ) ) - if prj_owner is None: + if row is None: raise ProjectNotFoundError(project_id) - return TypeAdapter(UserID).validate_python(prj_owner) + return TypeAdapter(UserID).validate_python(row[0]), TypeAdapter( + ProductName + ).validate_python(row[1]) diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/exceptions.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/exceptions.py index b0381357acae..edc23da3a8bf 100644 --- 
a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/exceptions.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/exceptions.py @@ -1,7 +1,7 @@ # Errors raised by node_ports module as NodeportsException # # -# NOTE: Error message SHALL explain the reason for the error and it is prefered in one line, i.e. avoid '\n' in message +# NOTE: Error message SHALL explain the reason for the error and it is preferred in one line, i.e. avoid '\n' in message # # @@ -10,7 +10,7 @@ class NodeportsException(Exception): """Basic exception for errors raised in nodeports""" def __init__(self, msg: str | None = None): - super().__init__(msg or "An error occured in simcore") + super().__init__(msg or "An error occurred in simcore") class ReadOnlyError(NodeportsException): @@ -169,3 +169,7 @@ def __init__(self, symlink, symlink_target_path): super().__init__(message) self.symlink = symlink self.symlink_target_path = symlink_target_path + + +class ProductNotSpecified(NodeportsException): + """A product name must be provided""" diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py index 60f44f7a7e65..b3387887bba2 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py @@ -13,6 +13,7 @@ UploadedPart, ) from models_library.basic_types import SHA256Str +from models_library.products import ProductName from models_library.projects_nodes_io import LocationID, LocationName, StorageFileID from models_library.users import UserID from pydantic import AnyUrl, ByteSize, TypeAdapter @@ -62,6 +63,7 @@ async def complete_file_upload( async def get_download_link_from_s3( *, user_id: UserID, + product_name: ProductName, store_name: LocationName | None, store_id: LocationID | None, s3_object: StorageFileID, @@ -76,13 +78,14 @@ async def get_download_link_from_s3( """ async with ClientSessionContextManager(client_session) as session: store_id = await _filemanager_utils.resolve_location_id( - session, user_id, store_name, store_id + session, user_id, product_name, store_name, store_id ) file_link = await storage_client.get_download_file_link( session=session, file_id=s3_object, location_id=store_id, user_id=user_id, + product_name=product_name, link_type=link_type, ) return URL(f"{file_link}") @@ -91,6 +94,7 @@ async def get_download_link_from_s3( async def get_upload_links_from_s3( *, user_id: UserID, + product_name: ProductName, store_name: LocationName | None, store_id: LocationID | None, s3_object: StorageFileID, @@ -102,7 +106,7 @@ async def get_upload_links_from_s3( ) -> tuple[LocationID, FileUploadSchema]: async with ClientSessionContextManager(client_session) as session: store_id = await _filemanager_utils.resolve_location_id( - session, user_id, store_name, store_id + session, user_id, product_name, store_name, store_id ) file_links = await storage_client.get_upload_file_links( session=session, @@ -120,6 +124,7 @@ async def get_upload_links_from_s3( async def download_path_from_s3( *, user_id: UserID, + product_name: ProductName, store_name: LocationName | None, store_id: LocationID | None, s3_object: StorageFileID, @@ -148,10 +153,11 @@ async def download_path_from_s3( async with ClientSessionContextManager(client_session) as session: store_id = await _filemanager_utils.resolve_location_id( - session, user_id, store_name, store_id + session, user_id, product_name, store_name, store_id ) file_meta_data: 
FileMetaDataGet = await _get_file_meta_data( user_id=user_id, + product_name=product_name, s3_object=s3_object, store_id=store_id, client_session=session, @@ -166,6 +172,7 @@ async def download_path_from_s3( # get the s3 link download_link = await get_download_link_from_s3( user_id=user_id, + product_name=product_name, store_name=store_name, store_id=store_id, s3_object=s3_object, @@ -277,6 +284,7 @@ async def _generate_checksum( async def upload_path( # pylint: disable=too-many-arguments *, user_id: UserID, + product_name: ProductName, store_id: LocationID | None, store_name: LocationName | None, s3_object: StorageFileID, @@ -309,6 +317,7 @@ async def upload_path( # pylint: disable=too-many-arguments with attempt: result = await _upload_path( user_id=user_id, + product_name=product_name, store_id=store_id, store_name=store_name, s3_object=s3_object, @@ -325,6 +334,7 @@ async def upload_path( # pylint: disable=too-many-arguments async def _upload_path( # pylint: disable=too-many-arguments *, user_id: UserID, + product_name: ProductName, store_id: LocationID | None, store_name: LocationName | None, s3_object: StorageFileID, @@ -363,6 +373,7 @@ async def _upload_path( # pylint: disable=too-many-arguments try: store_id, upload_links = await get_upload_links_from_s3( user_id=user_id, + product_name=product_name, store_name=store_name, store_id=store_id, s3_object=s3_object, @@ -454,6 +465,7 @@ async def _upload_to_s3( async def _get_file_meta_data( user_id: UserID, + product_name: ProductName, store_id: LocationID, s3_object: StorageFileID, client_session: ClientSession | None = None, @@ -466,6 +478,7 @@ async def _get_file_meta_data( file_id=s3_object, location_id=store_id, user_id=user_id, + product_name=product_name, ) _logger.debug( "Result for metadata s3_object=%s, result=%s", @@ -477,6 +490,7 @@ async def _get_file_meta_data( async def entry_exists( user_id: UserID, + product_name: ProductName, store_id: LocationID, s3_object: StorageFileID, client_session: ClientSession | None = None, @@ -489,7 +503,7 @@ async def entry_exists( """ try: file_metadata: FileMetaDataGet = await _get_file_meta_data( - user_id, store_id, s3_object, client_session + user_id, product_name, store_id, s3_object, client_session ) result: bool = ( file_metadata.file_id == s3_object @@ -511,6 +525,7 @@ class FileMetaData: async def get_file_metadata( user_id: UserID, + product_name: ProductName, store_id: LocationID, s3_object: StorageFileID, client_session: ClientSession | None = None, @@ -520,6 +535,7 @@ async def get_file_metadata( """ file_metadata: FileMetaDataGet = await _get_file_meta_data( user_id=user_id, + product_name=product_name, store_id=store_id, s3_object=s3_object, client_session=client_session, @@ -534,6 +550,7 @@ async def get_file_metadata( async def delete_file( user_id: UserID, + product_name: ProductName, store_id: LocationID, s3_object: StorageFileID, client_session: ClientSession | None = None, @@ -541,5 +558,9 @@ async def delete_file( async with ClientSessionContextManager(client_session) as session: _logger.debug("Will delete file for s3_object=%s", s3_object) await storage_client.delete_file( - session=session, file_id=s3_object, location_id=store_id, user_id=user_id + session=session, + file_id=s3_object, + location_id=store_id, + user_id=user_id, + product_name=product_name, ) diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py index 7b4b93bde777..664537d7ca22 100644 --- 
a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py @@ -7,6 +7,7 @@ from typing import Any from models_library.api_schemas_storage.storage_schemas import LinkType +from models_library.products import ProductName from models_library.services_io import BaseServiceIOModel from models_library.services_types import ServicePortKey from pydantic import ( @@ -24,6 +25,7 @@ from ..node_ports_common.exceptions import ( AbsoluteSymlinkIsNotUploadableException, InvalidItemTypeError, + ProductNotSpecified, SymlinkToSymlinkIsNotUploadableException, ) from . import port_utils @@ -174,7 +176,10 @@ def _converter(value: ItemConcreteValue) -> ItemConcreteValue: assert self._py_value_converter # nosec async def get_value( - self, *, file_link_type: LinkType | None = None + self, + *, + file_link_type: LinkType | None = None, + product_name: ProductName | None = None, ) -> ItemValue | None: """Resolves data links and returns resulted value @@ -209,11 +214,15 @@ async def _evaluate() -> ItemValue | None: return other_port_itemvalue if isinstance(self.value, FileLink): + if product_name is None: + raise ProductNotSpecified + # let's get the download/upload link from storage url_itemvalue: AnyUrl | None = ( await port_utils.get_download_link_from_storage( # pylint: disable=protected-access user_id=self._node_ports.user_id, + product_name=product_name, value=self.value, link_type=file_link_type, ) @@ -237,7 +246,9 @@ async def _evaluate() -> ItemValue | None: return v async def get( - self, progress_bar: ProgressBarData | None = None + self, + progress_bar: ProgressBarData | None = None, + product_name: ProductName | None = None, ) -> ItemConcreteValue | None: """ Transforms DataItemValue value -> ItemConcreteValue @@ -271,8 +282,13 @@ async def _evaluate() -> ItemConcreteValue | None: elif isinstance(self.value, FileLink): # this is a link from storage + + if product_name is None: + raise ProductNotSpecified + value = await port_utils.pull_file_from_store( user_id=self._node_ports.user_id, + product_name=product_name, key=self.key, file_to_key_map=self.file_to_key_map, value=self.value, @@ -295,7 +311,7 @@ async def _evaluate() -> ItemConcreteValue | None: # otherwise, this is a BasicValueTypes value = self.value - # don't atempt conversion of None it fails + # don't attempt conversion of None it fails if value is None: return None @@ -314,6 +330,7 @@ async def _set( *, set_kwargs: SetKWargs | None = None, progress_bar: ProgressBarData, + product_name: ProductName | None = None, ) -> None: """ :raises InvalidItemTypeError @@ -345,9 +362,13 @@ async def _set( if set_kwargs and set_kwargs.file_base_path: base_path = set_kwargs.file_base_path / self.key + if product_name is None: + raise ProductNotSpecified + new_value = await port_utils.push_file_to_store( file=converted_value, user_id=self._node_ports.user_id, + product_name=product_name, project_id=self._node_ports.project_id, node_id=self._node_ports.node_uuid, r_clone_settings=self._node_ports.r_clone_settings, @@ -371,6 +392,7 @@ async def set( new_value: ItemConcreteValue, *, progress_bar: ProgressBarData | None = None, + product_name: ProductName | None = None, **set_kwargs, ) -> None: """sets a value to the port, by default it is also stored in the database @@ -383,10 +405,13 @@ async def set( **set_kwargs, progress_bar=progress_bar or ProgressBarData(num_steps=1, description="set"), + product_name=product_name, ) await self._node_ports.save_to_db_cb(self._node_ports) - async def 
set_value(self, new_item_value: ItemValue | None) -> None: + async def set_value( + self, new_item_value: ItemValue | None, product_name: ProductName | None = None + ) -> None: """set the value on the port using an item-value :raises InvalidItemTypeError @@ -399,9 +424,13 @@ async def set_value(self, new_item_value: ItemValue | None) -> None: if not isinstance(new_item_value, AnyUrl): raise InvalidItemTypeError(self.property_type, f"{new_item_value}") + if product_name is None: + raise ProductNotSpecified + new_filelink: FileLink = await port_utils.get_file_link_from_url( new_item_value, self._node_ports.user_id, + product_name, self._node_ports.project_id, self._node_ports.node_uuid, ) diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_utils.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_utils.py index 0447408afb82..e7bb9246e040 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_utils.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_utils.py @@ -9,6 +9,7 @@ LinkType, ) from models_library.basic_types import SHA256Str +from models_library.products import ProductName from models_library.services_types import FileName, ServicePortKey from models_library.users import UserID from pydantic import AnyUrl, ByteSize, TypeAdapter @@ -83,7 +84,7 @@ async def get_value_from_link( async def get_download_link_from_storage( - user_id: UserID, value: FileLink, link_type: LinkType + user_id: UserID, product_name: ProductName, value: FileLink, link_type: LinkType ) -> AnyUrl: """ :raises exceptions.NodeportsException @@ -95,6 +96,7 @@ async def get_download_link_from_storage( link = await filemanager.get_download_link_from_s3( user_id=user_id, + product_name=product_name, store_id=value.store, store_name=None, s3_object=value.path, @@ -108,7 +110,12 @@ async def get_download_link_from_storage( async def get_download_link_from_storage_overload( - user_id: UserID, project_id: str, node_id: str, file_name: str, link_type: LinkType + user_id: UserID, + product_name: ProductName, + project_id: str, + node_id: str, + file_name: str, + link_type: LinkType, ) -> AnyUrl: """Overloads get_download_link_from_storage with arguments that match those in get_upload_link_from_storage @@ -120,6 +127,7 @@ async def get_download_link_from_storage_overload( ) link = await filemanager.get_download_link_from_s3( user_id=user_id, + product_name=product_name, store_name=None, store_id=SIMCORE_LOCATION, s3_object=s3_object, @@ -131,6 +139,7 @@ async def get_download_link_from_storage_overload( async def get_upload_links_from_storage( user_id: UserID, + product_name: ProductName, project_id: str, node_id: str, file_name: str, @@ -144,6 +153,7 @@ async def get_upload_links_from_storage( ) _, links = await filemanager.get_upload_links_from_s3( user_id=user_id, + product_name=product_name, store_id=SIMCORE_LOCATION, store_name=None, s3_object=s3_object, @@ -156,7 +166,11 @@ async def get_upload_links_from_storage( async def target_link_exists( - user_id: UserID, project_id: str, node_id: str, file_name: str + user_id: UserID, + product_name: ProductName, + project_id: str, + node_id: str, + file_name: str, ) -> bool: log.debug( "checking if target of link to file from storage for %s exists", file_name @@ -166,6 +180,7 @@ async def target_link_exists( ) return await filemanager.entry_exists( user_id=user_id, + product_name=product_name, store_id=SIMCORE_LOCATION, s3_object=s3_object, is_directory=False, @@ -173,19 +188,27 @@ async def target_link_exists( async def 
delete_target_link( - user_id: UserID, project_id: str, node_id: str, file_name: str + user_id: UserID, + product_name: ProductName, + project_id: str, + node_id: str, + file_name: str, ) -> None: log.debug("deleting target of link to file from storage for %s", file_name) s3_object = data_items_utils.create_simcore_file_id( Path(file_name), project_id, node_id ) return await filemanager.delete_file( - user_id=user_id, store_id=SIMCORE_LOCATION, s3_object=s3_object + user_id=user_id, + product_name=product_name, + store_id=SIMCORE_LOCATION, + s3_object=s3_object, ) async def pull_file_from_store( user_id: UserID, + product_name: ProductName, key: str, file_to_key_map: dict[FileName, ServicePortKey] | None, value: FileLink, @@ -198,6 +221,7 @@ async def pull_file_from_store( local_path = data_items_utils.get_folder_path(key) downloaded_file = await filemanager.download_path_from_s3( user_id=user_id, + product_name=product_name, store_id=value.store, store_name=None, s3_object=value.path, @@ -223,6 +247,7 @@ async def push_file_to_store( *, file: Path, user_id: UserID, + product_name: ProductName, project_id: str, node_id: str, io_log_redirect_cb: LogRedirectCB | None, @@ -244,6 +269,7 @@ async def push_file_to_store( upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( user_id=user_id, + product_name=product_name, store_id=SIMCORE_LOCATION, store_name=None, s3_object=s3_object, @@ -301,6 +327,7 @@ def is_file_type(port_type: str) -> bool: async def get_file_link_from_url( new_value: AnyUrl, user_id: UserID, + product_name: ProductName, project_id: str, node_id: str, ) -> FileLink: @@ -311,6 +338,7 @@ async def get_file_link_from_url( ) file_metadata = await filemanager.get_file_metadata( user_id=user_id, + product_name=product_name, store_id=SIMCORE_LOCATION, s3_object=s3_object, ) diff --git a/packages/simcore-sdk/tests/integration/conftest.py b/packages/simcore-sdk/tests/integration/conftest.py index 2eb7e7fd47d7..f98d32545cbb 100644 --- a/packages/simcore-sdk/tests/integration/conftest.py +++ b/packages/simcore-sdk/tests/integration/conftest.py @@ -15,15 +15,21 @@ from aiohttp import ClientSession from models_library.api_schemas_storage.storage_schemas import FileUploadSchema from models_library.generics import Envelope +from models_library.products import ProductName from models_library.projects_nodes_io import LocationID, NodeIDStr, SimcoreS3FileID from models_library.users import UserID from pydantic import TypeAdapter -from pytest_simcore.helpers.faker_factories import random_project, random_user +from pytest_simcore.helpers.faker_factories import ( + random_product, + random_project, + random_user, +) from pytest_simcore.helpers.postgres_tools import sync_insert_and_get_row_lifespan from settings_library.r_clone import RCloneSettings, S3Provider from settings_library.s3 import S3Settings from simcore_postgres_database.models.comp_tasks import comp_tasks from simcore_postgres_database.models.file_meta_data import file_meta_data +from simcore_postgres_database.models.products import products from simcore_postgres_database.models.projects import projects from simcore_postgres_database.models.users import users from simcore_sdk.node_ports_common.r_clone import is_r_clone_available @@ -52,13 +58,26 @@ def user_id(postgres_db: sa.engine.Engine) -> Iterable[UserID]: @pytest.fixture -def project_id(user_id: int, postgres_db: sa.engine.Engine) -> Iterable[str]: +def product_name(postgres_db: sa.engine.Engine) -> Iterable[ProductName]: + with 
sync_insert_and_get_row_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup + postgres_db, + table=products, + values=random_product(), + pk_col=products.c.name, + ) as row: + yield row["name"] + + +@pytest.fixture +def project_id( + user_id: int, product_name: ProductName, postgres_db: sa.engine.Engine +) -> Iterable[str]: # inject project for user in db. This will give user_id, the full project's ownership # pylint: disable=no-value-for-parameter stmt = ( projects.insert() - .values(**random_project(prj_owner=user_id)) + .values(**random_project(prj_owner=user_id, product_name=product_name)) .returning(projects.c.uuid) ) print(f"{stmt}") diff --git a/packages/simcore-sdk/tests/integration/test_node_data_data_manager.py b/packages/simcore-sdk/tests/integration/test_node_data_data_manager.py index d43433a005c9..ad21c805e20a 100644 --- a/packages/simcore-sdk/tests/integration/test_node_data_data_manager.py +++ b/packages/simcore-sdk/tests/integration/test_node_data_data_manager.py @@ -14,6 +14,7 @@ import pytest from faker import Faker +from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, SimcoreS3FileID from models_library.users import UserID @@ -151,6 +152,7 @@ async def test_valid_upload_download( node_ports_config: None, content_path: Path, user_id: UserID, + product_name: ProductName, project_id: ProjectID, node_uuid: NodeID, r_clone_settings: RCloneSettings, @@ -160,6 +162,7 @@ async def test_valid_upload_download( async with ProgressBarData(num_steps=2, description=faker.pystr()) as progress_bar: await data_manager._push_directory( # noqa: SLF001 user_id=user_id, + product_name=product_name, project_id=project_id, node_uuid=node_uuid, source_path=content_path, @@ -175,6 +178,7 @@ async def test_valid_upload_download( await data_manager._pull_directory( # noqa: SLF001 user_id=user_id, + product_name=product_name, project_id=project_id, node_uuid=node_uuid, destination_path=content_path, @@ -193,6 +197,7 @@ async def test_valid_upload_download_saved_to( node_ports_config, content_path: Path, user_id: UserID, + product_name: ProductName, project_id: ProjectID, node_uuid: NodeID, random_tmp_dir_generator: Callable, @@ -203,6 +208,7 @@ async def test_valid_upload_download_saved_to( async with ProgressBarData(num_steps=2, description=faker.pystr()) as progress_bar: await data_manager._push_directory( # noqa: SLF001 user_id=user_id, + product_name=product_name, project_id=project_id, node_uuid=node_uuid, source_path=content_path, @@ -221,6 +227,7 @@ async def test_valid_upload_download_saved_to( await data_manager._pull_directory( # noqa: SLF001 user_id=user_id, + product_name=product_name, project_id=project_id, node_uuid=node_uuid, destination_path=content_path, @@ -240,6 +247,7 @@ async def test_delete_legacy_archive( node_ports_config, content_path: Path, user_id: UserID, + product_name: ProductName, project_id: ProjectID, node_uuid: NodeID, r_clone_settings: RCloneSettings, @@ -256,6 +264,7 @@ async def test_delete_legacy_archive( await filemanager.upload_path( user_id=user_id, + product_name=product_name, store_id=SIMCORE_LOCATION, store_name=None, s3_object=TypeAdapter(SimcoreS3FileID).validate_python( @@ -273,6 +282,7 @@ async def test_delete_legacy_archive( assert ( await data_manager._state_metadata_entry_exists( # noqa: SLF001 user_id=user_id, + product_name=product_name, project_id=project_id, node_uuid=node_uuid, path=content_path, @@ -291,6 +301,7 @@ async def 
test_delete_legacy_archive( assert ( await data_manager._state_metadata_entry_exists( # noqa: SLF001 user_id=user_id, + product_name=product_name, project_id=project_id, node_uuid=node_uuid, path=content_path, diff --git a/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py b/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py index 9b4dd6f3b67a..6e21bc204d85 100644 --- a/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py +++ b/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py @@ -13,6 +13,7 @@ import pytest from aiohttp import ClientError from faker import Faker +from models_library.products import ProductName from models_library.projects_nodes_io import ( LocationID, SimcoreS3DirectoryID, @@ -84,7 +85,8 @@ def _file_size(size_str: str, **pytest_params): async def test_valid_upload_download( node_ports_config: None, tmpdir: Path, - user_id: int, + user_id: UserID, + product_name: ProductName, create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], s3_simcore_location: LocationID, file_size: ByteSize, @@ -100,6 +102,7 @@ async def test_valid_upload_download( async with ProgressBarData(num_steps=2, description=faker.pystr()) as progress_bar: upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( user_id=user_id, + product_name=product_name, store_id=s3_simcore_location, store_name=None, s3_object=file_id, @@ -115,7 +118,10 @@ async def test_valid_upload_download( assert store_id == s3_simcore_location assert e_tag file_metadata = await filemanager.get_file_metadata( - user_id=user_id, store_id=store_id, s3_object=file_id + user_id=user_id, + product_name=product_name, + store_id=store_id, + s3_object=file_id, ) assert file_metadata.location == store_id assert file_metadata.etag == e_tag @@ -123,6 +129,7 @@ async def test_valid_upload_download( download_folder = Path(tmpdir) / "downloads" download_file_path = await filemanager.download_path_from_s3( user_id=user_id, + product_name=product_name, store_id=s3_simcore_location, store_name=None, s3_object=file_id, @@ -148,7 +155,8 @@ async def test_valid_upload_download( async def test_valid_upload_download_using_file_object( node_ports_config: None, tmpdir: Path, - user_id: int, + user_id: UserID, + product_name: ProductName, create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], s3_simcore_location: LocationID, file_size: ByteSize, @@ -162,6 +170,7 @@ async def test_valid_upload_download_using_file_object( with file_path.open("rb") as file_object: upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( user_id=user_id, + product_name=product_name, store_id=s3_simcore_location, store_name=None, s3_object=file_id, @@ -176,7 +185,7 @@ async def test_valid_upload_download_using_file_object( assert store_id == s3_simcore_location assert e_tag file_metadata = await filemanager.get_file_metadata( - user_id=user_id, store_id=store_id, s3_object=file_id + user_id=user_id, product_name=product_name, store_id=store_id, s3_object=file_id ) assert file_metadata.location == store_id assert file_metadata.etag == e_tag @@ -185,6 +194,7 @@ async def test_valid_upload_download_using_file_object( async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar: download_file_path = await filemanager.download_path_from_s3( user_id=user_id, + product_name=product_name, store_id=s3_simcore_location, store_name=None, s3_object=file_id, @@ -228,6 +238,7 @@ async def 
test_failed_upload_is_properly_removed_from_storage( optional_sync_settings: _SyncSettings, file_size: ByteSize, user_id: UserID, + product_name: ProductName, mocked_upload_file_raising_exceptions: None, ): file_path = create_file_of_size(file_size) @@ -235,6 +246,7 @@ async def test_failed_upload_is_properly_removed_from_storage( with pytest.raises(exceptions.S3TransferError): await filemanager.upload_path( user_id=user_id, + product_name=product_name, store_id=s3_simcore_location, store_name=None, s3_object=file_id, @@ -244,7 +256,10 @@ async def test_failed_upload_is_properly_removed_from_storage( ) with pytest.raises(exceptions.S3InvalidPathError): await filemanager.get_file_metadata( - user_id=user_id, store_id=s3_simcore_location, s3_object=file_id + user_id=user_id, + product_name=product_name, + store_id=s3_simcore_location, + s3_object=file_id, ) @@ -263,6 +278,7 @@ async def test_failed_upload_after_valid_upload_keeps_last_valid_state( optional_sync_settings: _SyncSettings, file_size: ByteSize, user_id: UserID, + product_name: ProductName, mocker: MockerFixture, ): # upload a valid file @@ -270,6 +286,7 @@ async def test_failed_upload_after_valid_upload_keeps_last_valid_state( file_id = create_valid_file_uuid("", file_path) upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( user_id=user_id, + product_name=product_name, store_id=s3_simcore_location, store_name=None, s3_object=file_id, @@ -283,7 +300,7 @@ async def test_failed_upload_after_valid_upload_keeps_last_valid_state( assert e_tag # check the file is correctly uploaded file_metadata = await filemanager.get_file_metadata( - user_id=user_id, store_id=store_id, s3_object=file_id + user_id=user_id, product_name=product_name, store_id=store_id, s3_object=file_id ) assert file_metadata.location == store_id assert file_metadata.etag == e_tag @@ -301,6 +318,7 @@ async def test_failed_upload_after_valid_upload_keeps_last_valid_state( with pytest.raises(exceptions.S3TransferError): await filemanager.upload_path( user_id=user_id, + product_name=product_name, store_id=s3_simcore_location, store_name=None, s3_object=file_id, @@ -310,7 +328,10 @@ async def test_failed_upload_after_valid_upload_keeps_last_valid_state( ) # the file shall be back to its original state file_metadata = await filemanager.get_file_metadata( - user_id=user_id, store_id=s3_simcore_location, s3_object=file_id + user_id=user_id, + product_name=product_name, + store_id=s3_simcore_location, + s3_object=file_id, ) assert file_metadata.location == store_id assert file_metadata.etag == e_tag @@ -319,7 +340,8 @@ async def test_failed_upload_after_valid_upload_keeps_last_valid_state( async def test_invalid_file_path( node_ports_config: None, tmpdir: Path, - user_id: int, + user_id: UserID, + product_name: ProductName, create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], s3_simcore_location: LocationID, optional_sync_settings: _SyncSettings, @@ -334,6 +356,7 @@ async def test_invalid_file_path( with pytest.raises(FileNotFoundError): await filemanager.upload_path( user_id=user_id, + product_name=product_name, store_id=store, store_name=None, s3_object=file_id, @@ -348,6 +371,7 @@ async def test_invalid_file_path( ) as progress_bar: await filemanager.download_path_from_s3( user_id=user_id, + product_name=product_name, store_id=store, store_name=None, s3_object=file_id, @@ -362,6 +386,7 @@ async def test_errors_upon_invalid_file_identifiers( node_ports_config: None, tmpdir: Path, user_id: UserID, + product_name: ProductName, project_id: 
str, s3_simcore_location: LocationID, optional_sync_settings: _SyncSettings, @@ -376,6 +401,7 @@ async def test_errors_upon_invalid_file_identifiers( invalid_s3_path = "" await filemanager.upload_path( user_id=user_id, + product_name=product_name, store_id=store, store_name=None, s3_object=invalid_s3_path, @@ -387,6 +413,7 @@ async def test_errors_upon_invalid_file_identifiers( invalid_file_id = "file_id" await filemanager.upload_path( user_id=user_id, + product_name=product_name, store_id=store, store_name=None, s3_object=invalid_file_id, @@ -402,6 +429,7 @@ async def test_errors_upon_invalid_file_identifiers( invalid_s3_path = "" await filemanager.download_path_from_s3( user_id=user_id, + product_name=product_name, store_id=store, store_name=None, s3_object=invalid_s3_path, @@ -417,6 +445,7 @@ async def test_errors_upon_invalid_file_identifiers( ) as progress_bar: await filemanager.download_path_from_s3( user_id=user_id, + product_name=product_name, store_id=store, store_name=None, s3_object=TypeAdapter(SimcoreS3FileID).validate_python( @@ -432,7 +461,8 @@ async def test_errors_upon_invalid_file_identifiers( async def test_invalid_store( node_ports_config: None, tmpdir: Path, - user_id: int, + user_id: UserID, + product_name: ProductName, create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], optional_sync_settings: _SyncSettings, faker: Faker, @@ -446,6 +476,7 @@ async def test_invalid_store( with pytest.raises(exceptions.S3InvalidStore): await filemanager.upload_path( user_id=user_id, + product_name=product_name, store_id=None, store_name=store, # type: ignore s3_object=file_id, @@ -460,6 +491,7 @@ async def test_invalid_store( ) as progress_bar: await filemanager.download_path_from_s3( user_id=user_id, + product_name=product_name, store_id=None, store_name=store, # type: ignore s3_object=file_id, @@ -474,7 +506,8 @@ async def test_invalid_store( async def test_valid_metadata( node_ports_config: None, tmpdir: Path, - user_id: int, + user_id: UserID, + product_name: ProductName, create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], s3_simcore_location: LocationID, is_directory: bool, @@ -491,6 +524,7 @@ async def test_valid_metadata( is_metadata_present = await filemanager.entry_exists( user_id=user_id, + product_name=product_name, store_id=s3_simcore_location, s3_object=file_id, is_directory=is_directory, @@ -504,6 +538,7 @@ async def test_valid_metadata( file_id = create_valid_file_uuid("", path_to_upload) upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( user_id=user_id, + product_name=product_name, store_id=s3_simcore_location, store_name=None, s3_object=file_id, @@ -520,6 +555,7 @@ async def test_valid_metadata( is_metadata_present = await filemanager.entry_exists( user_id=user_id, + product_name=product_name, store_id=s3_simcore_location, s3_object=file_id, is_directory=is_directory, @@ -540,6 +576,7 @@ async def test_invalid_call_raises_exception( node_ports_config: None, tmpdir: Path, user_id: int, + product_name: ProductName, create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], s3_simcore_location: LocationID, fct: Callable[[int, str, str, Any | None], Awaitable], @@ -552,6 +589,7 @@ async def test_invalid_call_raises_exception( with pytest.raises(exceptions.StorageInvalidCall): await fct( user_id=None, + product_name=product_name, store_id=s3_simcore_location, s3_object=file_id, **extra_kwargs, # type: ignore @@ -561,6 +599,7 @@ async def test_invalid_call_raises_exception( with 
pytest.raises(exceptions.StorageInvalidCall): await fct( user_id=user_id, + product_name=product_name, store_id=s3_simcore_location, s3_object="bing", **extra_kwargs, # type: ignore @@ -570,7 +609,8 @@ async def test_invalid_call_raises_exception( async def test_delete_file( node_ports_config: None, tmpdir: Path, - user_id: int, + user_id: UserID, + product_name: ProductName, create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], s3_simcore_location: LocationID, storage_service: URL, @@ -582,6 +622,7 @@ async def test_delete_file( file_id = create_valid_file_uuid("", file_path) upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( user_id=user_id, + product_name=product_name, store_id=s3_simcore_location, store_name=None, s3_object=file_id, @@ -594,18 +635,29 @@ async def test_delete_file( assert e_tag is_metadata_present = await filemanager.entry_exists( - user_id=user_id, store_id=store_id, s3_object=file_id, is_directory=False + user_id=user_id, + product_name=product_name, + store_id=store_id, + s3_object=file_id, + is_directory=False, ) assert is_metadata_present is True await filemanager.delete_file( - user_id=user_id, store_id=s3_simcore_location, s3_object=file_id + user_id=user_id, + product_name=product_name, + store_id=s3_simcore_location, + s3_object=file_id, ) # check that it disappeared assert ( await filemanager.entry_exists( - user_id=user_id, store_id=store_id, s3_object=file_id, is_directory=False + user_id=user_id, + product_name=product_name, + store_id=store_id, + s3_object=file_id, + is_directory=False, ) is False ) @@ -617,7 +669,8 @@ async def test_upload_path_source_is_a_folder( project_id: str, tmp_path: Path, faker: Faker, - user_id: int, + user_id: UserID, + product_name: ProductName, s3_simcore_location: LocationID, files_in_folder: int, r_clone_settings: RCloneSettings, @@ -638,6 +691,7 @@ async def test_upload_path_source_is_a_folder( upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( user_id=user_id, + product_name=product_name, store_id=s3_simcore_location, store_name=None, s3_object=s3_object, @@ -651,6 +705,7 @@ async def test_upload_path_source_is_a_folder( async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar: await filemanager.download_path_from_s3( user_id=user_id, + product_name=product_name, store_name=None, store_id=s3_simcore_location, s3_object=s3_object, diff --git a/packages/simcore-sdk/tests/unit/test_node_data_data_manager.py b/packages/simcore-sdk/tests/unit/test_node_data_data_manager.py index 411aa5f02abb..327a04faa7d0 100644 --- a/packages/simcore-sdk/tests/unit/test_node_data_data_manager.py +++ b/packages/simcore-sdk/tests/unit/test_node_data_data_manager.py @@ -10,8 +10,10 @@ import pytest from faker import Faker +from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID +from models_library.users import UserID from pytest_mock import MockerFixture from servicelib.progress_bar import ProgressBarData from settings_library.r_clone import RCloneSettings, S3Provider @@ -65,7 +67,8 @@ def node_uuid(node_uuid: str) -> NodeID: async def test_push_folder( - user_id: int, + user_id: UserID, + product_name: ProductName, project_id: ProjectID, node_uuid: NodeID, mocker: MockerFixture, @@ -97,6 +100,7 @@ async def test_push_folder( async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar: await data_manager._push_directory( # noqa: SLF001 user_id, 
+ product_name, project_id, node_uuid, test_folder, @@ -120,7 +124,8 @@ async def test_push_folder( async def test_push_file( - user_id: int, + user_id: UserID, + product_name: ProductName, project_id: ProjectID, node_uuid: NodeID, mocker, @@ -145,6 +150,7 @@ async def test_push_file( async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar: await data_manager._push_directory( # noqa: SLF001 user_id, + product_name, project_id, node_uuid, file_path, @@ -170,7 +176,8 @@ async def test_push_file( @pytest.mark.parametrize("create_legacy_archive", [False, True]) async def test_pull_legacy_archive( - user_id: int, + user_id: UserID, + product_name: ProductName, project_id: ProjectID, node_uuid: NodeID, mocker, @@ -228,6 +235,7 @@ async def test_pull_legacy_archive( async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar: await data_manager._pull_legacy_archive( # noqa: SLF001 user_id, + product_name, project_id, node_uuid, test_folder, @@ -250,18 +258,19 @@ async def test_pull_legacy_archive( progress_bar=progress_bar._children[0], # noqa: SLF001 ) - matchs, mismatchs, errors = cmpfiles( + matches, mismatches, errors = cmpfiles( test_folder, test_control_folder, [x.name for x in test_control_folder.glob("**/*")], ) - assert len(matchs) == files_number - assert not mismatchs + assert len(matches) == files_number + assert not mismatches assert not errors async def test_pull_directory( - user_id: int, + user_id: UserID, + product_name: ProductName, project_id: ProjectID, node_uuid: NodeID, mocker, @@ -286,6 +295,7 @@ async def test_pull_directory( async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar: await data_manager._pull_directory( # noqa: SLF001 user_id, + product_name, project_id, node_uuid, fake_download_folder, From e401184efab62c3369ab95d8c9f9a50b351b04f4 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 19 Dec 2025 11:08:14 +0100 Subject: [PATCH 28/35] fix missing --- .../simcore_service_director_v2/utils/dask.py | 29 ++++++++++++++----- 1 file changed, 21 insertions(+), 8 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/utils/dask.py b/services/director-v2/src/simcore_service_director_v2/utils/dask.py index 4600f201b140..0aaca0012ebd 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/dask.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/dask.py @@ -22,6 +22,7 @@ from models_library.api_schemas_directorv2.services import NodeRequirements from models_library.docker import DockerLabelKey from models_library.errors import ErrorDict +from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, NodeIDStr from models_library.services import ServiceKey, ServiceVersion @@ -46,7 +47,7 @@ from ..core.errors import ( ComputationalBackendNotConnectedError, ComputationalSchedulerChangedError, - InsuficientComputationalResourcesError, + InsufficientComputationalResourcesError, MissingComputationalResourcesError, PortsValidationError, ) @@ -219,6 +220,7 @@ async def compute_input_data( async def compute_output_data_schema( *, user_id: UserID, + product_name: ProductName, project_id: ProjectID, node_id: NodeID, file_link_type: FileLinkType, @@ -236,6 +238,7 @@ async def compute_output_data_schema( if port_utils.is_file_type(port.property_type): value_links = await port_utils.get_upload_links_from_storage( user_id=user_id, + product_name=product_name, 
project_id=f"{project_id}", node_id=f"{node_id}", file_name=( @@ -265,12 +268,14 @@ async def compute_output_data_schema( async def compute_service_log_file_upload_link( user_id: UserID, + product_name: ProductName, project_id: ProjectID, node_id: NodeID, file_link_type: FileLinkType, ) -> AnyUrl: value_links = await port_utils.get_upload_links_from_storage( user_id=user_id, + product_name=product_name, project_id=f"{project_id}", node_id=f"{node_id}", file_name=LOGS_FILE_NAME, @@ -360,6 +365,7 @@ async def compute_task_envs( async def _get_service_log_file_download_link( user_id: UserID, + product_name: ProductName, project_id: ProjectID, node_id: NodeID, file_link_type: FileLinkType, @@ -372,6 +378,7 @@ async def _get_service_log_file_download_link( try: value_link: AnyUrl = await port_utils.get_download_link_from_storage_overload( user_id=user_id, + product_name=product_name, project_id=f"{project_id}", node_id=f"{node_id}", file_name=LOGS_FILE_NAME, @@ -384,16 +391,20 @@ async def _get_service_log_file_download_link( async def get_task_log_file( - user_id: UserID, project_id: ProjectID, node_id: NodeID + user_id: UserID, product_name: ProductName, project_id: ProjectID, node_id: NodeID ) -> TaskLogFileGet: try: log_file_url = await _get_service_log_file_download_link( - user_id, project_id, node_id, file_link_type=FileLinkType.PRESIGNED + user_id, + product_name=product_name, + project_id=project_id, + node_id=node_id, + file_link_type=FileLinkType.PRESIGNED, ) except NodeportsException as err: # Unexpected error: Cannot determine the cause of failure - # to get donwload link and cannot handle it automatically. + # to get download link and cannot handle it automatically. # Will treat it as "not available" and log a warning log_file_url = None _logger.warning( @@ -411,6 +422,7 @@ async def get_task_log_file( async def clean_task_output_and_log_files_if_invalid( db_engine: AsyncEngine, user_id: UserID, + product_name: ProductName, project_id: ProjectID, node_id: NodeID, ports: node_ports_v2.Nodeports | None = None, @@ -431,22 +443,23 @@ async def clean_task_output_and_log_files_if_invalid( next(iter(port.file_to_key_map)) if port.file_to_key_map else port.key ) if await port_utils.target_link_exists( - user_id, f"{project_id}", f"{node_id}", file_name + user_id, product_name, f"{project_id}", f"{node_id}", file_name ): continue _logger.debug("entry %s is invalid, cleaning...", port.key) await port_utils.delete_target_link( - user_id, f"{project_id}", f"{node_id}", file_name + user_id, product_name, f"{project_id}", f"{node_id}", file_name ) # check log file if not await port_utils.target_link_exists( user_id=user_id, + product_name=product_name, project_id=f"{project_id}", node_id=f"{node_id}", file_name=LOGS_FILE_NAME, ): await port_utils.delete_target_link( - user_id, f"{project_id}", f"{node_id}", LOGS_FILE_NAME + user_id, product_name, f"{project_id}", f"{node_id}", LOGS_FILE_NAME ) @@ -591,7 +604,7 @@ def check_if_cluster_is_able_to_run_pipeline( ) # well then our workers are not powerful enough - raise InsuficientComputationalResourcesError( + raise InsufficientComputationalResourcesError( project_id=project_id, node_id=node_id, service_name=node_image.name, From 36959d4d120b92f7abff85ee1e05272ba4525dbd Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 19 Dec 2025 11:14:24 +0100 Subject: [PATCH 29/35] fix missing param --- .../src/simcore_service_api_server/api/routes/files.py | 4 +++- .../src/simcore_service_api_server/api/routes/programs.py | 4 +++- 2 files changed, 6 
insertions(+), 2 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/api/routes/files.py b/services/api-server/src/simcore_service_api_server/api/routes/files.py index 4d8658e3fe5a..be63947eaabe 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/files.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/files.py @@ -47,7 +47,7 @@ from ...models.schemas.jobs import UserFileToProgramJob from ...services_http.storage import StorageApi, StorageFileMetaData, to_file_api_model from ...services_http.webserver import AuthSession -from ..dependencies.authentication import get_current_user_id +from ..dependencies.authentication import get_current_user_id, get_product_name from ..dependencies.services import get_api_client from ._common import API_SERVER_DEV_FEATURES_ENABLED from ._constants import ( @@ -220,6 +220,7 @@ async def upload_file( request: Request, file: Annotated[UploadFile, FileParam(...)], user_id: Annotated[int, Depends(get_current_user_id)], + product_name: Annotated[str, Depends(get_product_name)], content_length: str | None = Header(None), ): """Uploads a single file to the system""" @@ -253,6 +254,7 @@ async def upload_file( # upload to S3 using pre-signed link upload_result: UploadedFolder | UploadedFile = await storage_upload_path( user_id=user_id, + product_name=product_name, store_id=SIMCORE_LOCATION, store_name=None, s3_object=file_meta.storage_file_id, diff --git a/services/api-server/src/simcore_service_api_server/api/routes/programs.py b/services/api-server/src/simcore_service_api_server/api/routes/programs.py index 86910bf754a8..27ea1cf020a2 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/programs.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/programs.py @@ -29,7 +29,7 @@ from ...models.pagination import Page, PaginationParams from ...models.schemas.jobs import Job, JobInputs from ...models.schemas.programs import Program, ProgramKeyId -from ..dependencies.authentication import get_current_user_id +from ..dependencies.authentication import get_current_user_id, get_product_name from ..dependencies.services import get_job_service, get_program_service _logger = logging.getLogger(__name__) @@ -152,6 +152,7 @@ async def create_program_job( program_key: ProgramKeyId, version: VersionStr, user_id: Annotated[PositiveInt, Depends(get_current_user_id)], + product_name: Annotated[str, Depends(get_product_name)], program_service: Annotated[ProgramService, Depends(get_program_service)], job_service: Annotated[JobService, Depends(get_job_service)], x_simcore_parent_project_uuid: Annotated[ProjectID | None, Header()] = None, @@ -194,6 +195,7 @@ async def create_program_job( _, file_upload_schema = await get_upload_links_from_s3( user_id=user_id, + product_name=product_name, store_name=None, store_id=SIMCORE_LOCATION, s3_object=f"{project.uuid}/{node_id}/workspace", From fcaf065ca511df54f4b7c55f5cd83074872d0766 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 19 Dec 2025 11:24:39 +0100 Subject: [PATCH 30/35] fix missing param --- .../modules/long_running_tasks.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py index 9387a3867fae..d53e1a26b25b 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py +++ 
b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py @@ -353,6 +353,7 @@ async def _restore_state_folder( ) -> None: await data_manager.pull( user_id=settings.DY_SIDECAR_USER_ID, + product_name=settings.DY_SIDECAR_PRODUCT_NAME, project_id=settings.DY_SIDECAR_PROJECT_ID, node_uuid=settings.DY_SIDECAR_NODE_ID, destination_path=Path(state_path), @@ -426,6 +427,7 @@ async def _save_state_folder( ) -> None: await data_manager.push( user_id=settings.DY_SIDECAR_USER_ID, + product_name=settings.DY_SIDECAR_PRODUCT_NAME, project_id=settings.DY_SIDECAR_PROJECT_ID, node_uuid=settings.DY_SIDECAR_NODE_ID, source_path=state_path, From d82f1e0cc1a7642486841a10bbda6f11b9955e5a Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 19 Dec 2025 11:28:58 +0100 Subject: [PATCH 31/35] fix missing param --- .../src/simcore_service_director_v2/modules/dask_client.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dask_client.py b/services/director-v2/src/simcore_service_director_v2/modules/dask_client.py index 169b7e522d48..d8bbf9408590 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dask_client.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dask_client.py @@ -49,6 +49,7 @@ ) from fastapi import FastAPI from models_library.clusters import ClusterAuthentication, ClusterTypeInModel +from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.projects_state import RunningState @@ -263,6 +264,7 @@ async def send_computation_tasks( self, *, user_id: UserID, + product_name: ProductName, project_id: ProjectID, tasks: dict[NodeID, Image], callback: _UserCallbackInSepThread, @@ -279,7 +281,7 @@ async def send_computation_tasks( - ComputationalSchedulerChangedError when expected scheduler changed - ComputationalBackendNotConnectedError when scheduler is not connected/running - MissingComputationalResourcesError (only for internal cluster) - - InsuficientComputationalResourcesError (only for internal cluster) + - InsufficientComputationalResourcesError (only for internal cluster) - TaskSchedulingError when any other error happens """ @@ -348,6 +350,7 @@ async def send_computation_tasks( ) output_data_keys = await dask_utils.compute_output_data_schema( user_id=user_id, + product_name=product_name, project_id=project_id, node_id=node_id, node_ports=node_ports, @@ -355,6 +358,7 @@ async def send_computation_tasks( ) log_file_url = await dask_utils.compute_service_log_file_upload_link( user_id, + product_name, project_id, node_id, file_link_type=self.tasks_file_link_type, From 4c9981e80ab9e9a6c174e07982aafff3caf1175b Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 19 Dec 2025 11:42:21 +0100 Subject: [PATCH 32/35] fix missing param --- .../api/routes/computations_tasks.py | 11 ++++++++--- .../api/rpc/_computations.py | 9 ++++++--- .../src/simcore_service_director_v2/core/errors.py | 4 ++-- 3 files changed, 16 insertions(+), 8 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/api/routes/computations_tasks.py b/services/director-v2/src/simcore_service_director_v2/api/routes/computations_tasks.py index a97b2b60f65b..0f77ece42d9b 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/routes/computations_tasks.py +++ b/services/director-v2/src/simcore_service_director_v2/api/routes/computations_tasks.py 
@@ -1,6 +1,6 @@ """CRUD operations on a computation's tasks sub-resource -A task is computation sub-resource that respresents a running computational service in the pipeline described above +A task is computation sub-resource that represents a running computational service in the pipeline described above Therefore, - the task ID is the same as the associated node uuid @@ -15,6 +15,7 @@ TasksOutputs, TasksSelection, ) +from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.users import UserID @@ -46,6 +47,7 @@ ) async def get_all_tasks_log_files( user_id: UserID, + product_name: ProductName, project_id: ProjectID, comp_pipelines_repo: Annotated[ CompPipelinesRepository, Depends(get_repository(CompPipelinesRepository)) @@ -69,7 +71,7 @@ async def get_all_tasks_log_files( tasks_logs_files: list[TaskLogFileGet] = await logged_gather( *[ - dask_utils.get_task_log_file(user_id, project_id, node_id) + dask_utils.get_task_log_file(user_id, product_name, project_id, node_id) for node_id in iter_task_ids ], reraise=True, @@ -85,6 +87,7 @@ async def get_all_tasks_log_files( ) async def get_task_log_file( user_id: UserID, + product_name: ProductName, project_id: ProjectID, node_uuid: NodeID, comp_tasks_repo: Annotated[ @@ -101,7 +104,9 @@ async def get_task_log_file( detail=[f"No task_id={node_uuid} found under computation {project_id}"], ) - return await dask_utils.get_task_log_file(user_id, project_id, node_uuid) + return await dask_utils.get_task_log_file( + user_id, product_name, project_id, node_uuid + ) @router.post( diff --git a/services/director-v2/src/simcore_service_director_v2/api/rpc/_computations.py b/services/director-v2/src/simcore_service_director_v2/api/rpc/_computations.py index 25d2a133ec57..71b4fa40ac39 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/rpc/_computations.py +++ b/services/director-v2/src/simcore_service_director_v2/api/rpc/_computations.py @@ -132,11 +132,14 @@ async def list_computation_collection_runs_page( async def _fetch_task_log( - user_id: UserID, task: CompRunSnapshotTaskDBGet | ComputationTaskForRpcDBGet + user_id: UserID, + product_name: ProductName, + task: CompRunSnapshotTaskDBGet | ComputationTaskForRpcDBGet, ) -> TaskLogFileGet | None: if not task.state.is_running(): return await dask_utils.get_task_log_file( user_id=user_id, + product_name=product_name, project_id=task.project_uuid, node_id=task.node_id, ) @@ -200,7 +203,7 @@ async def list_computations_latest_iteration_tasks_page( # Run all log fetches concurrently log_files = await limited_gather( - *[_fetch_task_log(user_id, task) for task in comp_tasks], + *[_fetch_task_log(user_id, product_name, task) for task in comp_tasks], limit=20, ) @@ -260,7 +263,7 @@ async def list_computation_collection_run_tasks_page( # Run all log fetches concurrently log_files = await limited_gather( - *[_fetch_task_log(user_id, task) for task in comp_tasks], + *[_fetch_task_log(user_id, product_name, task) for task in comp_tasks], limit=20, ) diff --git a/services/director-v2/src/simcore_service_director_v2/core/errors.py b/services/director-v2/src/simcore_service_director_v2/core/errors.py index 2e5f556c4ed6..107a17917b81 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/errors.py +++ b/services/director-v2/src/simcore_service_director_v2/core/errors.py @@ -2,7 +2,7 @@ TODO: Exceptions should provide all info to create Error instances of the API model -For instance, 
assume there is a ficticious exception class FieldValidationError, then it would +For instance, assume there is a fictitious exception class FieldValidationError, then it would translate into something like // response - 422 @@ -115,7 +115,7 @@ class MissingComputationalResourcesError( ) -class InsuficientComputationalResourcesError( +class InsufficientComputationalResourcesError( TaskSchedulingError ): # pylint: disable=too-many-ancestors msg_template: str = ( From dc2497d07cc9356ea46ba0c210ab6a3cd54200e6 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 19 Dec 2025 11:56:02 +0100 Subject: [PATCH 33/35] fix missing param --- .../tests/unit/test_modules_dask_client.py | 35 +++++++++++++++---- 1 file changed, 28 insertions(+), 7 deletions(-) diff --git a/services/director-v2/tests/unit/test_modules_dask_client.py b/services/director-v2/tests/unit/test_modules_dask_client.py index d9b205b5cba9..724696e6cab6 100644 --- a/services/director-v2/tests/unit/test_modules_dask_client.py +++ b/services/director-v2/tests/unit/test_modules_dask_client.py @@ -41,6 +41,7 @@ from fastapi.applications import FastAPI from models_library.api_schemas_directorv2.services import NodeRequirements from models_library.clusters import ClusterTypeInModel, NoAuthentication +from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.projects_state import RunningState @@ -58,7 +59,7 @@ ComputationalBackendNotConnectedError, ComputationalBackendTaskNotFoundError, ComputationalSchedulerChangedError, - InsuficientComputationalResourcesError, + InsufficientComputationalResourcesError, MissingComputationalResourcesError, ) from simcore_service_director_v2.models.comp_runs import RunMetadataDict @@ -214,7 +215,7 @@ def neg(x): result = await future assert result == -285 except AttributeError: - # enforces existance of 'app.state.engine' and sets to None + # enforces existence of 'app.state.engine' and sets to None client.app.state.engine = None return client @@ -480,7 +481,7 @@ def fake_sidecar_fct( return TaskOutputData.model_validate({"some_output_key": 123}) - # NOTE: We pass another fct so it can run in our localy created dask cluster + # NOTE: We pass another fct so it can run in our locally created dask cluster # NOTE2: since there is only 1 task here, it's ok to pass the nodeID node_params = image_params.fake_tasks[node_id] assert node_params.node_requirements is not None @@ -564,6 +565,7 @@ def fake_sidecar_fct( async def test_computation_task_is_persisted_on_dask_scheduler( dask_client: DaskClient, user_id: UserID, + product_name: ProductName, project_id: ProjectID, image_params: ImageParams, _mocked_node_ports: None, @@ -578,7 +580,7 @@ async def test_computation_task_is_persisted_on_dask_scheduler( If the dask future goes out of scope, then the task is forgotten by the dask backend. So if for some reason the client gets deleted, or the director-v2, then all the futures would be deleted, thus stopping all the computations. - To aleviate this, it is possible to persist the futures directly in the dask-scheduler. + To alleviate this, it is possible to persist the futures directly in the dask-scheduler. When submitting a computation task, the future corresponding to that task is "published" on the scheduler. 
""" @@ -598,9 +600,10 @@ def fake_sidecar_fct( return TaskOutputData.model_validate({"some_output_key": 123}) - # NOTE: We pass another fct so it can run in our localy created dask cluster + # NOTE: We pass another fct so it can run in our locally created dask cluster published_computation_task = await dask_client.send_computation_tasks( user_id=user_id, + product_name=product_name, project_id=project_id, tasks=image_params.fake_tasks, callback=mocked_user_completed_cb, @@ -657,6 +660,7 @@ def fake_sidecar_fct( async def test_abort_computation_tasks( dask_client: DaskClient, user_id: UserID, + product_name: ProductName, project_id: ProjectID, image_params: ImageParams, _mocked_node_ports: None, @@ -698,6 +702,7 @@ def fake_remote_fct( published_computation_task = await dask_client.send_computation_tasks( user_id=user_id, + product_name=product_name, project_id=project_id, tasks=image_params.fake_tasks, callback=mocked_user_completed_cb, @@ -754,6 +759,7 @@ def fake_remote_fct( async def test_failed_task_returns_exceptions( dask_client: DaskClient, user_id: UserID, + product_name: ProductName, project_id: ProjectID, gpu_image: ImageParams, _mocked_node_ports: None, @@ -776,6 +782,7 @@ def fake_failing_sidecar_fct( published_computation_task = await dask_client.send_computation_tasks( user_id=user_id, + product_name=product_name, project_id=project_id, tasks=gpu_image.fake_tasks, callback=mocked_user_completed_cb, @@ -818,6 +825,7 @@ async def test_send_computation_task_with_missing_resources_raises( dask_spec_local_cluster: SpecCluster, dask_client: DaskClient, user_id: UserID, + product_name: ProductName, project_id: ProjectID, image_params: ImageParams, _mocked_node_ports: None, @@ -846,6 +854,7 @@ async def test_send_computation_task_with_missing_resources_raises( with pytest.raises(MissingComputationalResourcesError): await dask_client.send_computation_tasks( user_id=user_id, + product_name=product_name, project_id=project_id, tasks=image_params.fake_tasks, callback=mocked_user_completed_cb, @@ -864,6 +873,7 @@ async def test_send_computation_task_with_hardware_info_raises( dask_spec_local_cluster: SpecCluster, dask_client: DaskClient, user_id: UserID, + product_name: ProductName, project_id: ProjectID, image_params: ImageParams, _mocked_node_ports: None, @@ -877,6 +887,7 @@ async def test_send_computation_task_with_hardware_info_raises( with pytest.raises(MissingComputationalResourcesError): await dask_client.send_computation_tasks( user_id=user_id, + product_name=product_name, project_id=project_id, tasks=image_params.fake_tasks, callback=mocked_user_completed_cb, @@ -894,6 +905,7 @@ async def test_send_computation_task_with_hardware_info_raises( async def test_too_many_resources_send_computation_task( dask_client: DaskClient, user_id: UserID, + product_name: ProductName, project_id: ProjectID, node_id: NodeID, _mocked_node_ports: None, @@ -916,9 +928,10 @@ async def test_too_many_resources_send_computation_task( fake_task = {node_id: image} # let's have a big number of CPUs - with pytest.raises(InsuficientComputationalResourcesError): + with pytest.raises(InsufficientComputationalResourcesError): await dask_client.send_computation_tasks( user_id=user_id, + product_name=product_name, project_id=project_id, tasks=fake_task, callback=mocked_user_completed_cb, @@ -935,6 +948,7 @@ async def test_disconnected_backend_raises_exception( dask_spec_local_cluster: SpecCluster, dask_client: DaskClient, user_id: UserID, + product_name: ProductName, project_id: ProjectID, cpu_image: ImageParams, 
_mocked_node_ports: None, @@ -949,6 +963,7 @@ async def test_disconnected_backend_raises_exception( with pytest.raises(ComputationalBackendNotConnectedError): await dask_client.send_computation_tasks( user_id=user_id, + product_name=product_name, project_id=project_id, tasks=cpu_image.fake_tasks, callback=mocked_user_completed_cb, @@ -967,6 +982,7 @@ async def test_changed_scheduler_raises_exception( dask_spec_local_cluster: SpecCluster, dask_client: DaskClient, user_id: UserID, + product_name: ProductName, project_id: ProjectID, cpu_image: ImageParams, _mocked_node_ports: None, @@ -999,6 +1015,7 @@ async def test_changed_scheduler_raises_exception( with pytest.raises(ComputationalSchedulerChangedError): await dask_client.send_computation_tasks( user_id=user_id, + product_name=product_name, project_id=project_id, tasks=cpu_image.fake_tasks, callback=mocked_user_completed_cb, @@ -1014,6 +1031,7 @@ async def test_changed_scheduler_raises_exception( async def test_get_tasks_status( dask_client: DaskClient, user_id: UserID, + product_name: ProductName, project_id: ProjectID, cpu_image: ImageParams, _mocked_node_ports: None, @@ -1045,6 +1063,7 @@ def fake_remote_fct( published_computation_task = await dask_client.send_computation_tasks( user_id=user_id, + product_name=product_name, project_id=project_id, tasks=cpu_image.fake_tasks, callback=mocked_user_completed_cb, @@ -1067,7 +1086,7 @@ def fake_remote_fct( # let the remote fct run through now start_event = Event(_DASK_EVENT_NAME, dask_client.backend.client) await start_event.set() # type: ignore - # it will become successful hopefuly + # it will become successful hopefully await _assert_wait_for_task_status( published_computation_task[0].job_id, dask_client, @@ -1096,6 +1115,7 @@ async def fake_task_handlers(mocker: MockerFixture) -> TaskHandlers: async def test_dask_sub_handlers( dask_client: DaskClient, user_id: UserID, + product_name: ProductName, project_id: ProjectID, cpu_image: ImageParams, _mocked_node_ports: None, @@ -1125,6 +1145,7 @@ def fake_remote_fct( # run the computation published_computation_task = await dask_client.send_computation_tasks( user_id=user_id, + product_name=product_name, project_id=project_id, tasks=cpu_image.fake_tasks, callback=mocked_user_completed_cb, From b53c5a7563c19c0dbb6849a2bdc6b7dcdf4095a8 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 19 Dec 2025 12:11:26 +0100 Subject: [PATCH 34/35] fix missing param --- .../modules/comp_scheduler/_scheduler_base.py | 7 ++++++- .../modules/comp_scheduler/_scheduler_dask.py | 7 +++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_base.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_base.py index 9c51b2d9a9fc..af513789e3c3 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_base.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_base.py @@ -23,6 +23,7 @@ import networkx as nx from common_library.logging.logging_errors import create_troubleshooting_log_kwargs from common_library.user_messages import user_message +from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, NodeIDStr from models_library.projects_state import RunningState @@ -516,6 +517,7 @@ async def _process_waiting_tasks( async def _update_states_from_comp_backend( self, 
user_id: UserID, + product_name: ProductName, project_id: ProjectID, iteration: Iteration, pipeline_dag: nx.DiGraph, @@ -559,6 +561,7 @@ async def _update_states_from_comp_backend( if sorted_tasks.completed or sorted_tasks.potentially_lost: await self._process_completed_tasks( user_id, + product_name, sorted_tasks.completed + sorted_tasks.potentially_lost, iteration, comp_run=comp_run, @@ -598,6 +601,7 @@ async def _stop_tasks( async def _process_completed_tasks( self, user_id: UserID, + product_name: ProductName, tasks: list[TaskStateTracker], iteration: Iteration, comp_run: CompRunsAtDB, @@ -621,6 +625,7 @@ async def apply( self, *, user_id: UserID, + product_name: ProductName, project_id: ProjectID, iteration: Iteration, ) -> None: @@ -640,7 +645,7 @@ async def apply( # 1. Update our list of tasks with data from backend (state, results) await self._update_states_from_comp_backend( - user_id, project_id, iteration, dag, comp_run + user_id, product_name, project_id, iteration, dag, comp_run ) # 1.1. get the updated tasks NOTE: we need to get them again as some states might have changed comp_tasks = await self._get_pipeline_tasks(project_id, dag) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_dask.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_dask.py index c9550fb78d03..2aded3a46545 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_dask.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_dask.py @@ -14,6 +14,7 @@ from dask_task_models_library.container_tasks.io import TaskOutputData from models_library.clusters import BaseCluster from models_library.errors import ErrorDict +from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.projects_state import RunningState @@ -125,6 +126,7 @@ async def _start_tasks( self, *, user_id: UserID, + product_name: ProductName, project_id: ProjectID, scheduled_tasks: dict[NodeID, CompTaskAtDB], comp_run: CompRunsAtDB, @@ -152,6 +154,7 @@ async def _start_tasks( for node_id, task in scheduled_tasks.items(): published_tasks = await client.send_computation_tasks( user_id=user_id, + product_name=product_name, project_id=project_id, tasks={node_id: task.image}, hardware_info=task.hardware_info, @@ -295,6 +298,7 @@ async def _stop_tasks( async def _process_completed_tasks( self, user_id: UserID, + product_name: ProductName, tasks: list[TaskStateTracker], iteration: Iteration, comp_run: CompRunsAtDB, @@ -323,6 +327,7 @@ async def _process_completed_tasks( result, iteration, comp_run, + product_name, ) for task, result in zip(tasks, tasks_results, strict=True) ), @@ -503,6 +508,7 @@ async def _process_task_result( result: BaseException | TaskOutputData, iteration: Iteration, comp_run: CompRunsAtDB, + product_name: ProductName, ) -> tuple[bool, str | None]: """Returns True and the job ID if the task was successfully processed and can be released from the Dask cluster.""" with log_context( @@ -557,6 +563,7 @@ async def _process_task_result( await clean_task_output_and_log_files_if_invalid( self.db_engine, comp_run.user_id, + product_name, comp_run.project_uuid, task.current.node_id, ) From 80c67a8daf90ca38b586aa6a7384f4326eb8475a Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 19 Dec 2025 12:30:00 +0100 Subject: [PATCH 35/35] fix base class --- 
.../modules/comp_scheduler/_scheduler_base.py | 1 + 1 file changed, 1 insertion(+) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_base.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_base.py index af513789e3c3..59e6e645e01b 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_base.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_base.py @@ -578,6 +578,7 @@ async def _start_tasks( self, *, user_id: UserID, + product_name: ProductName, project_id: ProjectID, scheduled_tasks: dict[NodeID, CompTaskAtDB], comp_run: CompRunsAtDB,
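For context on what this series changes for callers, below is a minimal usage sketch (not part of the patches themselves) of the updated simcore-sdk filemanager API: checking whether a stored entry exists and deleting it now requires product_name alongside user_id. Every keyword argument shown appears verbatim in the updated tests earlier in the series; the filemanager import path and the helper name are assumptions for illustration only.

    # Illustrative sketch only: delete a stored file if present, passing the new
    # product_name argument threaded through by this patch series.
    # The module path below is an assumption; the keyword arguments mirror the
    # updated calls in test_delete_file.
    from models_library.products import ProductName
    from models_library.projects_nodes_io import LocationID, StorageFileID
    from models_library.users import UserID
    from simcore_sdk.node_ports_common import filemanager  # assumed import path

    async def ensure_deleted(
        user_id: UserID,
        product_name: ProductName,
        store_id: LocationID,
        s3_object: StorageFileID,
    ) -> None:
        # entry_exists and delete_file now take product_name in addition to user_id
        if await filemanager.entry_exists(
            user_id=user_id,
            product_name=product_name,
            store_id=store_id,
            s3_object=s3_object,
            is_directory=False,
        ):
            await filemanager.delete_file(
                user_id=user_id,
                product_name=product_name,
                store_id=store_id,
                s3_object=s3_object,
            )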