From eefd65c01611190457d8350d847208af985cb62f Mon Sep 17 00:00:00 2001 From: Yuri Chiucconi Date: Fri, 28 Nov 2025 15:41:46 +0100 Subject: [PATCH 01/13] remove V2 from `fractal_server/app/schemas/` --- benchmarks/populate_db/populate_db_script.py | 68 ++++++------ fractal_server/app/models/v2/job.py | 4 +- fractal_server/app/routes/admin/v2/job.py | 30 +++--- .../app/routes/admin/v2/task_group.py | 22 ++-- .../routes/admin/v2/task_group_lifecycle.py | 40 +++---- .../app/routes/api/v2/_aux_functions.py | 10 +- .../api/v2/_aux_functions_task_lifecycle.py | 8 +- .../app/routes/api/v2/_aux_functions_tasks.py | 4 +- fractal_server/app/routes/api/v2/dataset.py | 40 +++---- fractal_server/app/routes/api/v2/job.py | 24 ++--- fractal_server/app/routes/api/v2/project.py | 22 ++-- .../app/routes/api/v2/status_legacy.py | 18 ++-- fractal_server/app/routes/api/v2/submit.py | 14 +-- fractal_server/app/routes/api/v2/task.py | 32 +++--- .../app/routes/api/v2/task_collection.py | 36 +++---- .../routes/api/v2/task_collection_custom.py | 24 ++--- .../app/routes/api/v2/task_collection_pixi.py | 18 ++-- .../app/routes/api/v2/task_group.py | 36 +++---- .../app/routes/api/v2/task_group_lifecycle.py | 52 ++++----- .../app/routes/api/v2/task_version_update.py | 10 +- fractal_server/app/routes/api/v2/workflow.py | 36 +++---- .../app/routes/api/v2/workflow_import.py | 22 ++-- .../app/routes/api/v2/workflowtask.py | 20 ++-- fractal_server/app/schemas/v2/__init__.py | 100 +++++++++--------- fractal_server/app/schemas/v2/dataset.py | 20 ++-- fractal_server/app/schemas/v2/dumps.py | 18 ++-- fractal_server/app/schemas/v2/job.py | 22 ++-- fractal_server/app/schemas/v2/manifest.py | 28 ++--- fractal_server/app/schemas/v2/project.py | 6 +- .../app/schemas/v2/status_legacy.py | 6 +- fractal_server/app/schemas/v2/task.py | 12 +-- .../app/schemas/v2/task_collection.py | 12 +-- fractal_server/app/schemas/v2/task_group.py | 32 +++--- fractal_server/app/schemas/v2/workflow.py | 32 +++--- fractal_server/app/schemas/v2/workflowtask.py | 28 ++--- fractal_server/app/shutdown.py | 6 +- fractal_server/data_migrations/old/2_0_3.py | 4 +- fractal_server/data_migrations/old/2_11_0.py | 20 ++-- .../json_schemas/generate_manifest_v2.py | 2 +- fractal_server/runner/v2/runner.py | 8 +- fractal_server/runner/v2/submit_workflow.py | 6 +- fractal_server/tasks/v2/local/_utils.py | 4 +- fractal_server/tasks/v2/local/collect.py | 15 ++- fractal_server/tasks/v2/local/collect_pixi.py | 15 ++- fractal_server/tasks/v2/local/deactivate.py | 14 +-- .../tasks/v2/local/deactivate_pixi.py | 6 +- fractal_server/tasks/v2/local/delete.py | 10 +- fractal_server/tasks/v2/local/reactivate.py | 10 +- .../tasks/v2/local/reactivate_pixi.py | 10 +- fractal_server/tasks/v2/ssh/collect.py | 14 +-- fractal_server/tasks/v2/ssh/collect_pixi.py | 14 +-- fractal_server/tasks/v2/ssh/deactivate.py | 14 +-- .../tasks/v2/ssh/deactivate_pixi.py | 4 +- fractal_server/tasks/v2/ssh/delete.py | 10 +- fractal_server/tasks/v2/ssh/reactivate.py | 10 +- .../tasks/v2/ssh/reactivate_pixi.py | 10 +- fractal_server/tasks/v2/utils_background.py | 18 ++-- fractal_server/tasks/v2/utils_database.py | 6 +- scripts/atomicity-test/tmp.py | 6 +- scripts/client/client.py | 62 +++++------ scripts/db_performance/create_dbs.py | 20 ++-- scripts/merge-dbs/create_mock_db.py | 68 ++++++------ scripts/validate_db_data_with_read_schemas.py | 40 +++---- tests/fixtures_tasks_v2.py | 12 +-- .../test_01_schemas/test_schemas_dataset.py | 38 +++---- .../test_01_schemas/test_schemas_manifest.py | 60 +++++------ 
.../test_01_schemas/test_schemas_workflow.py | 32 +++--- .../test_01_schemas/test_task_collection.py | 74 ++++++------- .../test_unit_json_schemas_v2.py | 4 +- .../test_01_schemas/test_unit_schemas_v2.py | 80 +++++++------- tests/v2/test_02_models/test_tasks_v2.py | 4 +- tests/v2/test_03_api/admin/test_admin_job.py | 10 +- .../test_03_api/admin/test_admin_taskgroup.py | 56 +++++----- tests/v2/test_03_api/test_api_dataset.py | 16 ++- tests/v2/test_03_api/test_api_history.py | 8 +- tests/v2/test_03_api/test_api_job.py | 12 +-- tests/v2/test_03_api/test_api_project.py | 12 +-- tests/v2/test_03_api/test_api_task.py | 24 ++--- tests/v2/test_03_api/test_api_task_group.py | 24 ++--- tests/v2/test_03_api/test_api_workflow.py | 10 +- .../test_api_workflow_import_export.py | 14 +-- .../v2/test_03_api/test_api_workflow_task.py | 8 +- .../test_unit_submit_workflow.py | 6 +- .../test_api_task_collection.py | 12 +-- .../test_api_task_collection_custom.py | 12 +-- .../test_api_task_lifecycle.py | 46 ++++---- .../test_collect_local.py | 18 ++-- .../test_collect_pixi_local.py | 8 +- .../test_deactivate_local.py | 38 +++---- .../test_deactivate_ssh.py | 38 +++---- .../test_reactivate_local.py | 12 +-- .../test_reactivate_ssh.py | 12 +-- .../test_unit_aux_functions_task_lifecycle.py | 4 +- 93 files changed, 1008 insertions(+), 1028 deletions(-) diff --git a/benchmarks/populate_db/populate_db_script.py b/benchmarks/populate_db/populate_db_script.py index dc525e5161..8f6e1d8be5 100644 --- a/benchmarks/populate_db/populate_db_script.py +++ b/benchmarks/populate_db/populate_db_script.py @@ -1,9 +1,9 @@ from fractal_server.app.schemas.user import UserCreate -from fractal_server.app.schemas.v2 import DatasetImportV2 -from fractal_server.app.schemas.v2 import JobCreateV2 -from fractal_server.app.schemas.v2 import ProjectCreateV2 -from fractal_server.app.schemas.v2 import WorkflowCreateV2 -from fractal_server.app.schemas.v2 import WorkflowTaskCreateV2 +from fractal_server.app.schemas.v2 import DatasetImport +from fractal_server.app.schemas.v2 import JobCreate +from fractal_server.app.schemas.v2 import ProjectCreate +from fractal_server.app.schemas.v2 import WorkflowCreate +from fractal_server.app.schemas.v2 import WorkflowTaskCreate from scripts.client import FractalClient @@ -61,19 +61,17 @@ def _user_flow_vanilla( working_task_id: int, ): user = _create_user_client(admin, user_identifier="vanilla") - proj = user.add_project(ProjectCreateV2(name="MyProject_uv")) + proj = user.add_project(ProjectCreate(name="MyProject_uv")) image_list = create_image_list(n_images=10) ds = user.import_dataset( proj.id, - DatasetImportV2( + DatasetImport( name="MyDataset", zarr_dir="/invalid/zarr", images=image_list ), ) - wf = user.add_workflow(proj.id, WorkflowCreateV2(name="MyWorkflow")) - user.add_workflowtask( - proj.id, wf.id, working_task_id, WorkflowTaskCreateV2() - ) - user.submit_job(proj.id, wf.id, ds.id, applyworkflow=JobCreateV2()) + wf = user.add_workflow(proj.id, WorkflowCreate(name="MyWorkflow")) + user.add_workflowtask(proj.id, wf.id, working_task_id, WorkflowTaskCreate()) + user.submit_job(proj.id, wf.id, ds.id, applyworkflow=JobCreate()) # power user: @@ -88,29 +86,29 @@ def _user_flow_power( failing_task_id: int, ): user = _create_user_client(admin, user_identifier="power") - proj = user.add_project(ProjectCreateV2(name="MyProject_upw")) + proj = user.add_project(ProjectCreate(name="MyProject_upw")) # we add also a dataset with images image_list = create_image_list(n_images=100) num_workflows = 20 
num_jobs_per_workflow = 20 for ind_wf in range(num_workflows): wf = user.add_workflow( - proj.id, WorkflowCreateV2(name=f"MyWorkflow-{ind_wf}") + proj.id, WorkflowCreate(name=f"MyWorkflow-{ind_wf}") ) user.add_workflowtask( - proj.id, wf.id, working_task_id, WorkflowTaskCreateV2() + proj.id, wf.id, working_task_id, WorkflowTaskCreate() ) if ind_wf % 2 == 0: user.add_workflowtask( - proj.id, wf.id, working_task_id, WorkflowTaskCreateV2() + proj.id, wf.id, working_task_id, WorkflowTaskCreate() ) user.add_workflowtask( - proj.id, wf.id, failing_task_id, WorkflowTaskCreateV2() + proj.id, wf.id, failing_task_id, WorkflowTaskCreate() ) for ind_job in range(num_jobs_per_workflow): ds = user.import_dataset( proj.id, - DatasetImportV2( + DatasetImport( name="MyDataset", zarr_dir="/invalid/zarr", images=image_list, @@ -120,7 +118,7 @@ def _user_flow_power( proj.id, wf.id, ds.id, - applyworkflow=JobCreateV2(), + applyworkflow=JobCreate(), ) @@ -134,14 +132,14 @@ def _user_flow_dataset( working_task_id: int, ): user = _create_user_client(admin, user_identifier="dataset") - proj = user.add_project(ProjectCreateV2(name="MyProject_us")) + proj = user.add_project(ProjectCreate(name="MyProject_us")) image_list = create_image_list(n_images=1000) n_datasets = 20 ds_list = [] for i in range(n_datasets): ds = user.import_dataset( proj.id, - DatasetImportV2( + DatasetImport( name=f"MyDataset_us-{i}", zarr_dir="/invalid/zarr", images=image_list, @@ -152,17 +150,17 @@ def _user_flow_dataset( num_workflows = 20 for i in range(num_workflows): wf = user.add_workflow( - proj.id, WorkflowCreateV2(name=f"MyWorkflow_us-{i}") + proj.id, WorkflowCreate(name=f"MyWorkflow_us-{i}") ) user.add_workflowtask( - proj.id, wf.id, working_task_id, WorkflowTaskCreateV2() + proj.id, wf.id, working_task_id, WorkflowTaskCreate() ) for ds in ds_list: user.submit_job( proj.id, wf.id, ds.id, - applyworkflow=JobCreateV2(), + applyworkflow=JobCreate(), ) @@ -180,27 +178,27 @@ def _user_flow_project( num_jobs_per_workflow = 5 image_list = create_image_list(100) for i in range(n_projects): - proj = user.add_project(ProjectCreateV2(name=f"MyProject_upj-{i}")) + proj = user.add_project(ProjectCreate(name=f"MyProject_upj-{i}")) ds = user.import_dataset( proj.id, - DatasetImportV2( + DatasetImport( name=f"MyDataset_up-{i}", zarr_dir="/invalid/zarr", images=image_list, ), ) wf = user.add_workflow( - proj.id, WorkflowCreateV2(name=f"MyWorkflow_up-{i}") + proj.id, WorkflowCreate(name=f"MyWorkflow_up-{i}") ) user.add_workflowtask( - proj.id, wf.id, working_task_id, WorkflowTaskCreateV2() + proj.id, wf.id, working_task_id, WorkflowTaskCreate() ) for i in range(num_jobs_per_workflow): user.submit_job( proj.id, wf.id, ds.id, - applyworkflow=JobCreateV2(), + applyworkflow=JobCreate(), ) @@ -214,21 +212,19 @@ def _user_flow_job( working_task_id: int, ): user = _create_user_client(admin, user_identifier="job") - proj = user.add_project(ProjectCreateV2(name="MyProject_uj")) + proj = user.add_project(ProjectCreate(name="MyProject_uj")) image_list = create_image_list(n_images=10) ds = user.import_dataset( proj.id, - DatasetImportV2( + DatasetImport( name="MyDataset", zarr_dir="/invalid/zarr", images=image_list ), ) - wf = user.add_workflow(proj.id, WorkflowCreateV2(name="MyWorkflow_uj")) - user.add_workflowtask( - proj.id, wf.id, working_task_id, WorkflowTaskCreateV2() - ) + wf = user.add_workflow(proj.id, WorkflowCreate(name="MyWorkflow_uj")) + user.add_workflowtask(proj.id, wf.id, working_task_id, WorkflowTaskCreate()) num_jobs_per_workflow = 100 for i in 
range(num_jobs_per_workflow): - user.submit_job(proj.id, wf.id, ds.id, applyworkflow=JobCreateV2()) + user.submit_job(proj.id, wf.id, ds.id, applyworkflow=JobCreate()) if __name__ == "__main__": diff --git a/fractal_server/app/models/v2/job.py b/fractal_server/app/models/v2/job.py index 87ee171172..6920d7df02 100644 --- a/fractal_server/app/models/v2/job.py +++ b/fractal_server/app/models/v2/job.py @@ -8,7 +8,7 @@ from sqlmodel import Field from sqlmodel import SQLModel -from fractal_server.app.schemas.v2 import JobStatusTypeV2 +from fractal_server.app.schemas.v2 import JobStatusType from fractal_server.utils import get_timestamp @@ -56,7 +56,7 @@ class JobV2(SQLModel, table=True): end_timestamp: datetime | None = Field( default=None, sa_column=Column(DateTime(timezone=True)) ) - status: str = JobStatusTypeV2.SUBMITTED + status: str = JobStatusType.SUBMITTED log: str | None = None executor_error_log: str | None = None diff --git a/fractal_server/app/routes/admin/v2/job.py b/fractal_server/app/routes/admin/v2/job.py index 4675abbd09..ad491a877c 100644 --- a/fractal_server/app/routes/admin/v2/job.py +++ b/fractal_server/app/routes/admin/v2/job.py @@ -24,9 +24,9 @@ from fractal_server.app.routes.pagination import PaginationResponse from fractal_server.app.routes.pagination import get_pagination_params from fractal_server.app.schemas.v2 import HistoryUnitStatus -from fractal_server.app.schemas.v2 import JobReadV2 -from fractal_server.app.schemas.v2 import JobStatusTypeV2 -from fractal_server.app.schemas.v2 import JobUpdateV2 +from fractal_server.app.schemas.v2 import JobRead +from fractal_server.app.schemas.v2 import JobStatusType +from fractal_server.app.schemas.v2 import JobUpdate from fractal_server.runner.filenames import WORKFLOW_LOG_FILENAME from fractal_server.utils import get_timestamp from fractal_server.zip_tools import _zip_folder_to_byte_stream_iterator @@ -34,14 +34,14 @@ router = APIRouter() -@router.get("/", response_model=PaginationResponse[JobReadV2]) +@router.get("/", response_model=PaginationResponse[JobRead]) async def view_job( id: int | None = None, user_id: int | None = None, project_id: int | None = None, dataset_id: int | None = None, workflow_id: int | None = None, - status: JobStatusTypeV2 | None = None, + status: JobStatusType | None = None, start_timestamp_min: AwareDatetime | None = None, start_timestamp_max: AwareDatetime | None = None, end_timestamp_min: AwareDatetime | None = None, @@ -50,7 +50,7 @@ async def view_job( pagination: PaginationRequest = Depends(get_pagination_params), user: UserOAuth = Depends(current_superuser_act), db: AsyncSession = Depends(get_async_db), -) -> PaginationResponse[JobReadV2]: +) -> PaginationResponse[JobRead]: """ Query `JobV2` table. 
@@ -154,13 +154,13 @@ async def view_job( ) -@router.get("/{job_id}/", response_model=JobReadV2) +@router.get("/{job_id}/", response_model=JobRead) async def view_single_job( job_id: int, show_tmp_logs: bool = False, user: UserOAuth = Depends(current_superuser_act), db: AsyncSession = Depends(get_async_db), -) -> JobReadV2: +) -> JobRead: job = await db.get(JobV2, job_id) if not job: raise HTTPException( @@ -169,7 +169,7 @@ async def view_single_job( ) await db.close() - if show_tmp_logs and (job.status == JobStatusTypeV2.SUBMITTED): + if show_tmp_logs and (job.status == JobStatusType.SUBMITTED): try: with open(f"{job.working_dir}/{WORKFLOW_LOG_FILENAME}") as f: job.log = f.read() @@ -179,13 +179,13 @@ async def view_single_job( return job -@router.patch("/{job_id}/", response_model=JobReadV2) +@router.patch("/{job_id}/", response_model=JobRead) async def update_job( - job_update: JobUpdateV2, + job_update: JobUpdate, job_id: int, user: UserOAuth = Depends(current_superuser_act), db: AsyncSession = Depends(get_async_db), -) -> JobReadV2 | None: +) -> JobRead | None: """ Change the status of an existing job. @@ -198,13 +198,13 @@ async def update_job( status_code=status.HTTP_404_NOT_FOUND, detail=f"Job {job_id} not found", ) - if job.status != JobStatusTypeV2.SUBMITTED: + if job.status != JobStatusType.SUBMITTED: raise HTTPException( status_code=status.HTTP_422_UNPROCESSABLE_CONTENT, detail=f"Job {job_id} has status {job.status=} != 'submitted'.", ) - if job_update.status != JobStatusTypeV2.FAILED: + if job_update.status != JobStatusType.FAILED: raise HTTPException( status_code=status.HTTP_422_UNPROCESSABLE_CONTENT, detail=f"Cannot set job status to {job_update.status}", @@ -217,7 +217,7 @@ async def update_job( job, "log", f"{job.log or ''}\nThis job was manually marked as " - f"'{JobStatusTypeV2.FAILED}' by an admin ({timestamp.isoformat()}).", + f"'{JobStatusType.FAILED}' by an admin ({timestamp.isoformat()}).", ) res = await db.execute( diff --git a/fractal_server/app/routes/admin/v2/task_group.py b/fractal_server/app/routes/admin/v2/task_group.py index af5b4d7e78..7d0e9a3411 100644 --- a/fractal_server/app/routes/admin/v2/task_group.py +++ b/fractal_server/app/routes/admin/v2/task_group.py @@ -20,12 +20,12 @@ from fractal_server.app.routes.pagination import PaginationRequest from fractal_server.app.routes.pagination import PaginationResponse from fractal_server.app.routes.pagination import get_pagination_params -from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2 -from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2 -from fractal_server.app.schemas.v2 import TaskGroupActivityV2Read +from fractal_server.app.schemas.v2 import TaskGroupActivityAction +from fractal_server.app.schemas.v2 import TaskGroupActivityRead +from fractal_server.app.schemas.v2 import TaskGroupActivityStatus +from fractal_server.app.schemas.v2 import TaskGroupOriginEnum from fractal_server.app.schemas.v2 import TaskGroupReadSuperuser -from fractal_server.app.schemas.v2 import TaskGroupUpdateV2 -from fractal_server.app.schemas.v2 import TaskGroupV2OriginEnum +from fractal_server.app.schemas.v2 import TaskGroupUpdate from fractal_server.logger import set_logger router = APIRouter() @@ -34,20 +34,20 @@ @router.get( - "/activity/", response_model=PaginationResponse[TaskGroupActivityV2Read] + "/activity/", response_model=PaginationResponse[TaskGroupActivityRead] ) async def get_task_group_activity_list( task_group_activity_id: int | None = None, user_id: int | None = None, taskgroupv2_id: 
int | None = None, pkg_name: str | None = None, - status: TaskGroupActivityStatusV2 | None = None, - action: TaskGroupActivityActionV2 | None = None, + status: TaskGroupActivityStatus | None = None, + action: TaskGroupActivityAction | None = None, timestamp_started_min: AwareDatetime | None = None, pagination: PaginationRequest = Depends(get_pagination_params), superuser: UserOAuth = Depends(current_superuser_act), db: AsyncSession = Depends(get_async_db), -) -> PaginationResponse[TaskGroupActivityV2Read]: +) -> PaginationResponse[TaskGroupActivityRead]: # Assign pagination parameters page = pagination.page page_size = pagination.page_size @@ -127,7 +127,7 @@ async def query_task_group_list( private: bool | None = None, active: bool | None = None, pkg_name: str | None = None, - origin: TaskGroupV2OriginEnum | None = None, + origin: TaskGroupOriginEnum | None = None, timestamp_last_used_min: AwareDatetime | None = None, timestamp_last_used_max: AwareDatetime | None = None, resource_id: int | None = None, @@ -217,7 +217,7 @@ async def query_task_group_list( @router.patch("/{task_group_id}/", response_model=TaskGroupReadSuperuser) async def patch_task_group( task_group_id: int, - task_group_update: TaskGroupUpdateV2, + task_group_update: TaskGroupUpdate, user: UserOAuth = Depends(current_superuser_act), db: AsyncSession = Depends(get_async_db), ) -> list[TaskGroupReadSuperuser]: diff --git a/fractal_server/app/routes/admin/v2/task_group_lifecycle.py b/fractal_server/app/routes/admin/v2/task_group_lifecycle.py index f219f6795a..570d432669 100644 --- a/fractal_server/app/routes/admin/v2/task_group_lifecycle.py +++ b/fractal_server/app/routes/admin/v2/task_group_lifecycle.py @@ -26,10 +26,10 @@ validate_user_profile, ) from fractal_server.app.schemas.v2 import ResourceType -from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2 -from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2 -from fractal_server.app.schemas.v2 import TaskGroupActivityV2Read -from fractal_server.app.schemas.v2 import TaskGroupV2OriginEnum +from fractal_server.app.schemas.v2 import TaskGroupActivityAction +from fractal_server.app.schemas.v2 import TaskGroupActivityRead +from fractal_server.app.schemas.v2 import TaskGroupActivityStatus +from fractal_server.app.schemas.v2 import TaskGroupOriginEnum from fractal_server.logger import set_logger from fractal_server.tasks.v2.local import deactivate_local from fractal_server.tasks.v2.local import delete_local @@ -46,7 +46,7 @@ @router.post( "/{task_group_id}/deactivate/", - response_model=TaskGroupActivityV2Read, + response_model=TaskGroupActivityRead, ) async def deactivate_task_group( task_group_id: int, @@ -54,7 +54,7 @@ async def deactivate_task_group( response: Response, superuser: UserOAuth = Depends(current_superuser_act), db: AsyncSession = Depends(get_async_db), -) -> TaskGroupActivityV2Read: +) -> TaskGroupActivityRead: """ Deactivate task-group venv """ @@ -78,13 +78,13 @@ async def deactivate_task_group( await check_no_submitted_job(task_group_id=task_group.id, db=db) # Shortcut for task-group with origin="other" - if task_group.origin == TaskGroupV2OriginEnum.OTHER: + if task_group.origin == TaskGroupOriginEnum.OTHER: task_group.active = False task_group_activity = TaskGroupActivityV2( user_id=task_group.user_id, taskgroupv2_id=task_group.id, - status=TaskGroupActivityStatusV2.OK, - action=TaskGroupActivityActionV2.DEACTIVATE, + status=TaskGroupActivityStatus.OK, + action=TaskGroupActivityAction.DEACTIVATE, pkg_name=task_group.pkg_name, 
version=(task_group.version or "N/A"), log=( @@ -103,8 +103,8 @@ async def deactivate_task_group( task_group_activity = TaskGroupActivityV2( user_id=task_group.user_id, taskgroupv2_id=task_group.id, - status=TaskGroupActivityStatusV2.PENDING, - action=TaskGroupActivityActionV2.DEACTIVATE, + status=TaskGroupActivityStatus.PENDING, + action=TaskGroupActivityAction.DEACTIVATE, pkg_name=task_group.pkg_name, version=task_group.version, timestamp_started=get_timestamp(), @@ -140,7 +140,7 @@ async def deactivate_task_group( @router.post( "/{task_group_id}/reactivate/", - response_model=TaskGroupActivityV2Read, + response_model=TaskGroupActivityRead, ) async def reactivate_task_group( task_group_id: int, @@ -148,7 +148,7 @@ async def reactivate_task_group( response: Response, superuser: UserOAuth = Depends(current_superuser_act), db: AsyncSession = Depends(get_async_db), -) -> TaskGroupActivityV2Read: +) -> TaskGroupActivityRead: """ Reactivate task-group venv """ @@ -173,13 +173,13 @@ async def reactivate_task_group( await check_no_submitted_job(task_group_id=task_group.id, db=db) # Shortcut for task-group with origin="other" - if task_group.origin == TaskGroupV2OriginEnum.OTHER: + if task_group.origin == TaskGroupOriginEnum.OTHER: task_group.active = True task_group_activity = TaskGroupActivityV2( user_id=task_group.user_id, taskgroupv2_id=task_group.id, - status=TaskGroupActivityStatusV2.OK, - action=TaskGroupActivityActionV2.REACTIVATE, + status=TaskGroupActivityStatus.OK, + action=TaskGroupActivityAction.REACTIVATE, pkg_name=task_group.pkg_name, version=(task_group.version or "N/A"), log=( @@ -206,8 +206,8 @@ async def reactivate_task_group( task_group_activity = TaskGroupActivityV2( user_id=task_group.user_id, taskgroupv2_id=task_group.id, - status=TaskGroupActivityStatusV2.PENDING, - action=TaskGroupActivityActionV2.REACTIVATE, + status=TaskGroupActivityStatus.PENDING, + action=TaskGroupActivityAction.REACTIVATE, pkg_name=task_group.pkg_name, version=task_group.version, timestamp_started=get_timestamp(), @@ -259,8 +259,8 @@ async def delete_task_group( task_group_activity = TaskGroupActivityV2( user_id=task_group.user_id, taskgroupv2_id=task_group.id, - status=TaskGroupActivityStatusV2.PENDING, - action=TaskGroupActivityActionV2.DELETE, + status=TaskGroupActivityStatus.PENDING, + action=TaskGroupActivityAction.DELETE, pkg_name=task_group.pkg_name, version=(task_group.version or "N/A"), timestamp_started=get_timestamp(), diff --git a/fractal_server/app/routes/api/v2/_aux_functions.py b/fractal_server/app/routes/api/v2/_aux_functions.py index ffbf077628..1d49c1b384 100644 --- a/fractal_server/app/routes/api/v2/_aux_functions.py +++ b/fractal_server/app/routes/api/v2/_aux_functions.py @@ -23,7 +23,7 @@ from fractal_server.app.models.v2 import TaskV2 from fractal_server.app.models.v2 import WorkflowTaskV2 from fractal_server.app.models.v2 import WorkflowV2 -from fractal_server.app.schemas.v2 import JobStatusTypeV2 +from fractal_server.app.schemas.v2 import JobStatusType from fractal_server.app.schemas.v2 import ProjectPermissions from fractal_server.logger import set_logger @@ -361,7 +361,7 @@ def _get_submitted_jobs_statement() -> SelectOfScalar: A sqlmodel statement that selects all `Job`s with `Job.status` equal to `submitted`.
""" - stm = select(JobV2).where(JobV2.status == JobStatusTypeV2.SUBMITTED) + stm = select(JobV2).where(JobV2.status == JobStatusType.SUBMITTED) return stm @@ -371,7 +371,7 @@ async def _workflow_has_submitted_job( ) -> bool: res = await db.execute( select(JobV2.id) - .where(JobV2.status == JobStatusTypeV2.SUBMITTED) + .where(JobV2.status == JobStatusType.SUBMITTED) .where(JobV2.workflow_id == workflow_id) .limit(1) ) @@ -470,9 +470,7 @@ async def clean_app_job_list_v2( result = await db.execute(stmt) db_jobs_list = result.scalars().all() submitted_job_ids = [ - job.id - for job in db_jobs_list - if job.status == JobStatusTypeV2.SUBMITTED + job.id for job in db_jobs_list if job.status == JobStatusType.SUBMITTED ] return submitted_job_ids diff --git a/fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py b/fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py index 79dad3f241..6f6f09e454 100644 --- a/fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py +++ b/fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py @@ -14,8 +14,8 @@ from fractal_server.app.models.v2 import TaskV2 from fractal_server.app.models.v2 import WorkflowTaskV2 from fractal_server.app.models.v2 import WorkflowV2 -from fractal_server.app.schemas.v2 import JobStatusTypeV2 -from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2 +from fractal_server.app.schemas.v2 import JobStatusType +from fractal_server.app.schemas.v2 import TaskGroupActivityStatus from fractal_server.logger import set_logger from fractal_server.tasks.v2.utils_package_names import normalize_package_name @@ -171,7 +171,7 @@ async def check_no_ongoing_activity( stm = ( select(TaskGroupActivityV2) .where(TaskGroupActivityV2.taskgroupv2_id == task_group_id) - .where(TaskGroupActivityV2.status == TaskGroupActivityStatusV2.ONGOING) + .where(TaskGroupActivityV2.status == TaskGroupActivityStatus.ONGOING) ) res = await db.execute(stm) ongoing_activities = res.scalars().all() @@ -213,7 +213,7 @@ async def check_no_submitted_job( .join(TaskV2, WorkflowTaskV2.task_id == TaskV2.id) .where(WorkflowTaskV2.order >= JobV2.first_task_index) .where(WorkflowTaskV2.order <= JobV2.last_task_index) - .where(JobV2.status == JobStatusTypeV2.SUBMITTED) + .where(JobV2.status == JobStatusType.SUBMITTED) .where(TaskV2.taskgroupv2_id == task_group_id) ) res = await db.execute(stm) diff --git a/fractal_server/app/routes/api/v2/_aux_functions_tasks.py b/fractal_server/app/routes/api/v2/_aux_functions_tasks.py index 3d203e9358..c1849e5e2d 100644 --- a/fractal_server/app/routes/api/v2/_aux_functions_tasks.py +++ b/fractal_server/app/routes/api/v2/_aux_functions_tasks.py @@ -27,7 +27,7 @@ from fractal_server.app.routes.auth._aux_auth import ( _verify_user_belongs_to_group, ) -from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2 +from fractal_server.app.schemas.v2 import TaskGroupActivityAction from fractal_server.images.tools import merge_type_filters from fractal_server.logger import set_logger @@ -252,7 +252,7 @@ async def _get_collection_task_group_activity_status_message( res = await db.execute( select(TaskGroupActivityV2) .where(TaskGroupActivityV2.taskgroupv2_id == task_group_id) - .where(TaskGroupActivityV2.action == TaskGroupActivityActionV2.COLLECT) + .where(TaskGroupActivityV2.action == TaskGroupActivityAction.COLLECT) ) task_group_activity_list = res.scalars().all() if len(task_group_activity_list) > 1: diff --git a/fractal_server/app/routes/api/v2/dataset.py b/fractal_server/app/routes/api/v2/dataset.py 
index d09bdab38a..6cd86b74db 100644 --- a/fractal_server/app/routes/api/v2/dataset.py +++ b/fractal_server/app/routes/api/v2/dataset.py @@ -13,11 +13,11 @@ from fractal_server.app.models.v2 import DatasetV2 from fractal_server.app.models.v2 import JobV2 from fractal_server.app.routes.auth import current_user_act_ver_prof -from fractal_server.app.schemas.v2 import DatasetCreateV2 -from fractal_server.app.schemas.v2 import DatasetReadV2 -from fractal_server.app.schemas.v2 import DatasetUpdateV2 -from fractal_server.app.schemas.v2.dataset import DatasetExportV2 -from fractal_server.app.schemas.v2.dataset import DatasetImportV2 +from fractal_server.app.schemas.v2 import DatasetCreate +from fractal_server.app.schemas.v2 import DatasetRead +from fractal_server.app.schemas.v2 import DatasetUpdate +from fractal_server.app.schemas.v2.dataset import DatasetExport +from fractal_server.app.schemas.v2.dataset import DatasetImport from fractal_server.app.schemas.v2.sharing import ProjectPermissions from fractal_server.string_tools import sanitize_string from fractal_server.urls import normalize_url @@ -31,15 +31,15 @@ @router.post( "/project/{project_id}/dataset/", - response_model=DatasetReadV2, + response_model=DatasetRead, status_code=status.HTTP_201_CREATED, ) async def create_dataset( project_id: int, - dataset: DatasetCreateV2, + dataset: DatasetCreate, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> DatasetReadV2 | None: +) -> DatasetRead | None: """ Add new dataset to current project """ @@ -93,13 +93,13 @@ async def create_dataset( @router.get( "/project/{project_id}/dataset/", - response_model=list[DatasetReadV2], + response_model=list[DatasetRead], ) async def read_dataset_list( project_id: int, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> list[DatasetReadV2] | None: +) -> list[DatasetRead] | None: """ Get dataset list for given project """ @@ -122,14 +122,14 @@ async def read_dataset_list( @router.get( "/project/{project_id}/dataset/{dataset_id}/", - response_model=DatasetReadV2, + response_model=DatasetRead, ) async def read_dataset( project_id: int, dataset_id: int, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> DatasetReadV2 | None: +) -> DatasetRead | None: """ Get info on a dataset associated to the current project """ @@ -146,15 +146,15 @@ async def read_dataset( @router.patch( "/project/{project_id}/dataset/{dataset_id}/", - response_model=DatasetReadV2, + response_model=DatasetRead, ) async def update_dataset( project_id: int, dataset_id: int, - dataset_update: DatasetUpdateV2, + dataset_update: DatasetUpdate, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> DatasetReadV2 | None: +) -> DatasetRead | None: """ Edit a dataset associated to the current project """ @@ -243,14 +243,14 @@ async def delete_dataset( @router.get( "/project/{project_id}/dataset/{dataset_id}/export/", - response_model=DatasetExportV2, + response_model=DatasetExport, ) async def export_dataset( project_id: int, dataset_id: int, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> DatasetExportV2 | None: +) -> DatasetExport | None: """ Export an existing dataset """ @@ -268,15 +268,15 @@ async def export_dataset( @router.post( "/project/{project_id}/dataset/import/", - response_model=DatasetReadV2, + response_model=DatasetRead, 
status_code=status.HTTP_201_CREATED, ) async def import_dataset( project_id: int, - dataset: DatasetImportV2, + dataset: DatasetImport, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> DatasetReadV2 | None: +) -> DatasetRead | None: """ Import an existing dataset into a project """ diff --git a/fractal_server/app/routes/api/v2/job.py b/fractal_server/app/routes/api/v2/job.py index 829f235aab..07d8c891fe 100644 --- a/fractal_server/app/routes/api/v2/job.py +++ b/fractal_server/app/routes/api/v2/job.py @@ -18,8 +18,8 @@ from fractal_server.app.routes.auth import current_user_act_ver_prof from fractal_server.app.routes.aux._job import _write_shutdown_file from fractal_server.app.routes.aux._runner import _check_shutdown_is_supported -from fractal_server.app.schemas.v2 import JobReadV2 -from fractal_server.app.schemas.v2 import JobStatusTypeV2 +from fractal_server.app.schemas.v2 import JobRead +from fractal_server.app.schemas.v2 import JobStatusType from fractal_server.app.schemas.v2.sharing import ProjectPermissions from fractal_server.runner.filenames import WORKFLOW_LOG_FILENAME from fractal_server.zip_tools import _zip_folder_to_byte_stream_iterator @@ -39,12 +39,12 @@ async def zip_folder_threaded(folder: str) -> Iterator[bytes]: router = APIRouter() -@router.get("/job/", response_model=list[JobReadV2]) +@router.get("/job/", response_model=list[JobRead]) async def get_user_jobs( user: UserOAuth = Depends(current_user_act_ver_prof), log: bool = True, db: AsyncSession = Depends(get_async_db), -) -> list[JobReadV2]: +) -> list[JobRead]: """ Returns all the jobs of the current user """ @@ -68,14 +68,14 @@ async def get_user_jobs( @router.get( "/project/{project_id}/workflow/{workflow_id}/job/", - response_model=list[JobReadV2], + response_model=list[JobRead], ) async def get_workflow_jobs( project_id: int, workflow_id: int, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> list[JobReadV2] | None: +) -> list[JobRead] | None: """ Returns all the jobs related to a specific workflow """ @@ -99,7 +99,7 @@ async def get_latest_job( dataset_id: int, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> JobReadV2: +) -> JobRead: await _get_workflow_check_access( project_id=project_id, workflow_id=workflow_id, @@ -127,7 +127,7 @@ async def get_latest_job( @router.get( "/project/{project_id}/job/{job_id}/", - response_model=JobReadV2, + response_model=JobRead, ) async def read_job( project_id: int, @@ -135,7 +135,7 @@ async def read_job( show_tmp_logs: bool = False, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> JobReadV2 | None: +) -> JobRead | None: """ Return info on an existing job """ @@ -150,7 +150,7 @@ async def read_job( job = output["job"] await db.close() - if show_tmp_logs and (job.status == JobStatusTypeV2.SUBMITTED): + if show_tmp_logs and (job.status == JobStatusType.SUBMITTED): try: with open(f"{job.working_dir}/{WORKFLOW_LOG_FILENAME}") as f: job.log = f.read() @@ -194,14 +194,14 @@ async def download_job_logs( @router.get( "/project/{project_id}/job/", - response_model=list[JobReadV2], + response_model=list[JobRead], ) async def get_job_list( project_id: int, user: UserOAuth = Depends(current_user_act_ver_prof), log: bool = True, db: AsyncSession = Depends(get_async_db), -) -> list[JobReadV2] | None: +) -> list[JobRead] | None: """ Get job list for given project """ diff --git 
a/fractal_server/app/routes/api/v2/project.py b/fractal_server/app/routes/api/v2/project.py index 445fe507df..83a52a3a63 100644 --- a/fractal_server/app/routes/api/v2/project.py +++ b/fractal_server/app/routes/api/v2/project.py @@ -15,10 +15,10 @@ from fractal_server.app.routes.aux.validate_user_profile import ( validate_user_profile, ) -from fractal_server.app.schemas.v2 import ProjectCreateV2 +from fractal_server.app.schemas.v2 import ProjectCreate from fractal_server.app.schemas.v2 import ProjectPermissions -from fractal_server.app.schemas.v2 import ProjectReadV2 -from fractal_server.app.schemas.v2 import ProjectUpdateV2 +from fractal_server.app.schemas.v2 import ProjectRead +from fractal_server.app.schemas.v2 import ProjectUpdate from fractal_server.logger import set_logger from ._aux_functions import _check_project_exists @@ -29,7 +29,7 @@ router = APIRouter() -@router.get("/project/", response_model=list[ProjectReadV2]) +@router.get("/project/", response_model=list[ProjectRead]) async def get_list_project( is_owner: bool = True, user: UserOAuth = Depends(current_user_act_ver_prof), @@ -51,12 +51,12 @@ async def get_list_project( return project_list -@router.post("/project/", response_model=ProjectReadV2, status_code=201) +@router.post("/project/", response_model=ProjectRead, status_code=201) async def create_project( - project: ProjectCreateV2, + project: ProjectCreate, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> ProjectReadV2 | None: +) -> ProjectRead | None: """ Create new project """ @@ -92,12 +92,12 @@ async def create_project( return db_project -@router.get("/project/{project_id}/", response_model=ProjectReadV2) +@router.get("/project/{project_id}/", response_model=ProjectRead) async def read_project( project_id: int, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> ProjectReadV2 | None: +) -> ProjectRead | None: """ Return info on an existing project """ @@ -111,10 +111,10 @@ async def read_project( return project -@router.patch("/project/{project_id}/", response_model=ProjectReadV2) +@router.patch("/project/{project_id}/", response_model=ProjectRead) async def update_project( project_id: int, - project_update: ProjectUpdateV2, + project_update: ProjectUpdate, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), ): diff --git a/fractal_server/app/routes/api/v2/status_legacy.py b/fractal_server/app/routes/api/v2/status_legacy.py index 508bf3c9d6..faeb555d77 100644 --- a/fractal_server/app/routes/api/v2/status_legacy.py +++ b/fractal_server/app/routes/api/v2/status_legacy.py @@ -9,8 +9,8 @@ from fractal_server.app.models.v2 import JobV2 from fractal_server.app.routes.auth import current_user_act_ver_prof from fractal_server.app.schemas.v2.sharing import ProjectPermissions -from fractal_server.app.schemas.v2.status_legacy import LegacyStatusReadV2 -from fractal_server.app.schemas.v2.status_legacy import WorkflowTaskStatusTypeV2 +from fractal_server.app.schemas.v2.status_legacy import LegacyStatusRead +from fractal_server.app.schemas.v2.status_legacy import WorkflowTaskStatusType from fractal_server.logger import set_logger from ._aux_functions import _get_dataset_check_access @@ -24,7 +24,7 @@ @router.get( "/project/{project_id}/status-legacy/", - response_model=LegacyStatusReadV2, + response_model=LegacyStatusRead, ) async def get_workflowtask_status( project_id: int, @@ -32,7 +32,7 @@ async def get_workflowtask_status( 
workflow_id: int, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> LegacyStatusReadV2 | None: +) -> LegacyStatusRead | None: """ Extract the status of all `WorkflowTaskV2` of a given `WorkflowV2` that ran on a given `DatasetV2`. @@ -116,18 +116,18 @@ async def get_workflowtask_status( ] try: first_submitted_index = running_job_statuses.index( - WorkflowTaskStatusTypeV2.SUBMITTED + WorkflowTaskStatusType.SUBMITTED ) except ValueError: logger.warning( f"Job {running_job.id} is submitted but its task list does not" - f" contain a {WorkflowTaskStatusTypeV2.SUBMITTED} task." + f" contain a {WorkflowTaskStatusType.SUBMITTED} task." ) first_submitted_index = 0 for wftask in running_job_wftasks[first_submitted_index:]: workflow_tasks_status_dict[wftask.id] = ( - WorkflowTaskStatusTypeV2.SUBMITTED + WorkflowTaskStatusType.SUBMITTED ) # The last workflow task that is included in the submitted job is also @@ -157,7 +157,7 @@ async def get_workflowtask_status( # If a wftask ID was not found, ignore it and continue continue clean_workflow_tasks_status_dict[str(wf_task.id)] = wf_task_status - if wf_task_status == WorkflowTaskStatusTypeV2.FAILED: + if wf_task_status == WorkflowTaskStatusType.FAILED: # Starting from the beginning of `workflow.task_list`, stop the # first time that you hit a failed job break @@ -166,5 +166,5 @@ async def get_workflowtask_status( # first time that you hit `last_valid_wftask_id`` break - response_body = LegacyStatusReadV2(status=clean_workflow_tasks_status_dict) + response_body = LegacyStatusRead(status=clean_workflow_tasks_status_dict) return response_body diff --git a/fractal_server/app/routes/api/v2/submit.py b/fractal_server/app/routes/api/v2/submit.py index e4ab341c6d..54a65cf411 100644 --- a/fractal_server/app/routes/api/v2/submit.py +++ b/fractal_server/app/routes/api/v2/submit.py @@ -23,9 +23,9 @@ from fractal_server.app.routes.aux.validate_user_profile import ( validate_user_profile, ) -from fractal_server.app.schemas.v2 import JobCreateV2 -from fractal_server.app.schemas.v2 import JobReadV2 -from fractal_server.app.schemas.v2 import JobStatusTypeV2 +from fractal_server.app.schemas.v2 import JobCreate +from fractal_server.app.schemas.v2 import JobRead +from fractal_server.app.schemas.v2 import JobStatusType from fractal_server.app.schemas.v2 import ResourceType from fractal_server.app.schemas.v2.sharing import ProjectPermissions from fractal_server.config import get_settings @@ -49,18 +49,18 @@ @router.post( "/project/{project_id}/job/submit/", status_code=status.HTTP_202_ACCEPTED, - response_model=JobReadV2, + response_model=JobRead, ) async def apply_workflow( project_id: int, workflow_id: int, dataset_id: int, - job_create: JobCreateV2, + job_create: JobCreate, background_tasks: BackgroundTasks, request: Request, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> JobReadV2 | None: +) -> JobRead | None: # Remove non-submitted V2 jobs from the app state when the list grows # beyond a threshold # NOTE: this may lead to a race condition on `app.state.jobsV2` if two @@ -152,7 +152,7 @@ async def apply_workflow( stm = ( select(JobV2) .where(JobV2.dataset_id == dataset_id) - .where(JobV2.status == JobStatusTypeV2.SUBMITTED) + .where(JobV2.status == JobStatusType.SUBMITTED) ) res = await db.execute(stm) if res.scalars().all(): diff --git a/fractal_server/app/routes/api/v2/task.py b/fractal_server/app/routes/api/v2/task.py index 521777528a..131bed5910 100644 --- 
a/fractal_server/app/routes/api/v2/task.py +++ b/fractal_server/app/routes/api/v2/task.py @@ -25,11 +25,11 @@ from fractal_server.app.models.v2 import TaskGroupV2 from fractal_server.app.models.v2 import TaskV2 from fractal_server.app.routes.auth import current_user_act_ver_prof -from fractal_server.app.schemas.v2 import TaskCreateV2 -from fractal_server.app.schemas.v2 import TaskGroupV2OriginEnum -from fractal_server.app.schemas.v2 import TaskReadV2 +from fractal_server.app.schemas.v2 import TaskCreate +from fractal_server.app.schemas.v2 import TaskGroupOriginEnum +from fractal_server.app.schemas.v2 import TaskRead from fractal_server.app.schemas.v2 import TaskType -from fractal_server.app.schemas.v2 import TaskUpdateV2 +from fractal_server.app.schemas.v2 import TaskUpdate from fractal_server.logger import set_logger router = APIRouter() @@ -37,7 +37,7 @@ logger = set_logger(__name__) -@router.get("/", response_model=list[TaskReadV2]) +@router.get("/", response_model=list[TaskRead]) async def get_list_task( args_schema: bool = True, category: str | None = None, @@ -45,7 +45,7 @@ async def get_list_task( author: str | None = None, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> list[TaskReadV2]: +) -> list[TaskRead]: """ Get list of available tasks """ @@ -86,12 +86,12 @@ async def get_list_task( return task_list -@router.get("/{task_id}/", response_model=TaskReadV2) +@router.get("/{task_id}/", response_model=TaskRead) async def get_task( task_id: int, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> TaskReadV2: +) -> TaskRead: """ Get info on a specific task """ @@ -99,13 +99,13 @@ async def get_task( return task -@router.patch("/{task_id}/", response_model=TaskReadV2) +@router.patch("/{task_id}/", response_model=TaskRead) async def patch_task( task_id: int, - task_update: TaskUpdateV2, + task_update: TaskUpdate, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> TaskReadV2 | None: +) -> TaskRead | None: """ Edit a specific task (restricted to task owner) """ @@ -137,16 +137,14 @@ async def patch_task( return db_task -@router.post( - "/", response_model=TaskReadV2, status_code=status.HTTP_201_CREATED -) +@router.post("/", response_model=TaskRead, status_code=status.HTTP_201_CREATED) async def create_task( - task: TaskCreateV2, + task: TaskCreate, user_group_id: int | None = None, private: bool = False, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> TaskReadV2 | None: +) -> TaskRead | None: """ Create a new task """ @@ -211,7 +209,7 @@ async def create_task( resource_id=resource_id, active=True, task_list=[db_task], - origin=TaskGroupV2OriginEnum.OTHER, + origin=TaskGroupOriginEnum.OTHER, version=db_task.version, pkg_name=pkg_name, ) diff --git a/fractal_server/app/routes/api/v2/task_collection.py b/fractal_server/app/routes/api/v2/task_collection.py index e9049c8803..26d1684dd7 100644 --- a/fractal_server/app/routes/api/v2/task_collection.py +++ b/fractal_server/app/routes/api/v2/task_collection.py @@ -25,12 +25,12 @@ ) from fractal_server.app.schemas.v2 import FractalUploadedFile from fractal_server.app.schemas.v2 import ResourceType -from fractal_server.app.schemas.v2 import TaskCollectPipV2 -from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2 -from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2 -from fractal_server.app.schemas.v2 import 
TaskGroupActivityV2Read -from fractal_server.app.schemas.v2 import TaskGroupCreateV2Strict -from fractal_server.app.schemas.v2 import TaskGroupV2OriginEnum +from fractal_server.app.schemas.v2 import TaskCollectPip +from fractal_server.app.schemas.v2 import TaskGroupActivityAction +from fractal_server.app.schemas.v2 import TaskGroupActivityRead +from fractal_server.app.schemas.v2 import TaskGroupActivityStatus +from fractal_server.app.schemas.v2 import TaskGroupCreateStrict +from fractal_server.app.schemas.v2 import TaskGroupOriginEnum from fractal_server.logger import reset_logger_handlers from fractal_server.logger import set_logger from fractal_server.tasks.v2.local.collect import collect_local @@ -59,9 +59,9 @@ class CollectionRequestData(BaseModel): Validate form data _and_ wheel file. """ - task_collect: TaskCollectPipV2 + task_collect: TaskCollectPip file: UploadFile | None = None - origin: TaskGroupV2OriginEnum + origin: TaskGroupOriginEnum @model_validator(mode="before") @classmethod @@ -75,7 +75,7 @@ def validate_data(cls, values): raise ValueError( "When no `file` is provided, `package` is required." ) - values["origin"] = TaskGroupV2OriginEnum.PYPI + values["origin"] = TaskGroupOriginEnum.PYPI else: if package is not None: raise ValueError( @@ -87,7 +87,7 @@ def validate_data(cls, values): "Cannot set `package_version` when `file` is " f"provided (given package_version='{package_version}')." ) - values["origin"] = TaskGroupV2OriginEnum.WHEELFILE + values["origin"] = TaskGroupOriginEnum.WHEELFILE for forbidden_char in FORBIDDEN_CHAR_WHEEL: if forbidden_char in file.filename: @@ -125,7 +125,7 @@ def parse_request_data( else None ) # Validate and coerce form data - task_collect_pip = TaskCollectPipV2( + task_collect_pip = TaskCollectPip( package=package, package_version=package_version, package_extras=package_extras, @@ -150,7 +150,7 @@ def parse_request_data( @router.post( "/collect/pip/", - response_model=TaskGroupActivityV2Read, + response_model=TaskGroupActivityRead, ) async def collect_tasks_pip( response: Response, @@ -160,7 +160,7 @@ async def collect_tasks_pip( user_group_id: int | None = None, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> TaskGroupActivityV2Read: +) -> TaskGroupActivityRead: """ Task-collection endpoint """ @@ -221,7 +221,7 @@ async def collect_tasks_pip( wheel_file = None # Set pkg_name, version, origin and archive_path - if request_data.origin == TaskGroupV2OriginEnum.WHEELFILE: + if request_data.origin == TaskGroupOriginEnum.WHEELFILE: try: wheel_filename = request_data.file.filename wheel_info = _parse_wheel_filename(wheel_filename) @@ -242,7 +242,7 @@ async def collect_tasks_pip( wheel_info["distribution"] ) task_group_attrs["version"] = wheel_info["version"] - elif request_data.origin == TaskGroupV2OriginEnum.PYPI: + elif request_data.origin == TaskGroupOriginEnum.PYPI: pkg_name = task_collect.package task_group_attrs["pkg_name"] = normalize_package_name(pkg_name) latest_version = await get_package_version_from_pypi( @@ -278,7 +278,7 @@ async def collect_tasks_pip( # Validate TaskGroupV2 attributes try: - TaskGroupCreateV2Strict(**task_group_attrs) + TaskGroupCreateStrict(**task_group_attrs) except ValidationError as e: raise HTTPException( status_code=status.HTTP_422_UNPROCESSABLE_CONTENT, @@ -328,8 +328,8 @@ async def collect_tasks_pip( task_group_activity = TaskGroupActivityV2( user_id=task_group.user_id, taskgroupv2_id=task_group.id, - status=TaskGroupActivityStatusV2.PENDING, - 
action=TaskGroupActivityActionV2.COLLECT, + status=TaskGroupActivityStatus.PENDING, + action=TaskGroupActivityAction.COLLECT, pkg_name=task_group.pkg_name, version=task_group.version, ) diff --git a/fractal_server/app/routes/api/v2/task_collection_custom.py b/fractal_server/app/routes/api/v2/task_collection_custom.py index 659047d408..aca0ecf890 100644 --- a/fractal_server/app/routes/api/v2/task_collection_custom.py +++ b/fractal_server/app/routes/api/v2/task_collection_custom.py @@ -17,11 +17,11 @@ validate_user_profile, ) from fractal_server.app.schemas.v2 import ResourceType -from fractal_server.app.schemas.v2 import TaskCollectCustomV2 -from fractal_server.app.schemas.v2 import TaskCreateV2 -from fractal_server.app.schemas.v2 import TaskGroupCreateV2 -from fractal_server.app.schemas.v2 import TaskGroupV2OriginEnum -from fractal_server.app.schemas.v2 import TaskReadV2 +from fractal_server.app.schemas.v2 import TaskCollectCustom +from fractal_server.app.schemas.v2 import TaskCreate +from fractal_server.app.schemas.v2 import TaskGroupCreate +from fractal_server.app.schemas.v2 import TaskGroupOriginEnum +from fractal_server.app.schemas.v2 import TaskRead from fractal_server.logger import set_logger from fractal_server.string_tools import validate_cmd from fractal_server.tasks.v2.utils_background import prepare_tasks_metadata @@ -38,16 +38,14 @@ logger = set_logger(__name__) -@router.post( - "/collect/custom/", status_code=201, response_model=list[TaskReadV2] -) +@router.post("/collect/custom/", status_code=201, response_model=list[TaskRead]) async def collect_task_custom( - task_collect: TaskCollectCustomV2, + task_collect: TaskCollectCustom, private: bool = False, user_group_id: int | None = None, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> list[TaskReadV2]: +) -> list[TaskRead]: # Get validated resource and profile resource, profile = await validate_user_profile(user=user, db=db) resource_id = resource.id @@ -139,7 +137,7 @@ async def collect_task_custom( else: package_root = Path(task_collect.package_root) - task_list: list[TaskCreateV2] = prepare_tasks_metadata( + task_list: list[TaskCreate] = prepare_tasks_metadata( package_manifest=task_collect.manifest, python_bin=Path(task_collect.python_interpreter), package_root=package_root, @@ -148,14 +146,14 @@ async def collect_task_custom( # Prepare task-group attributes task_group_attrs = dict( - origin=TaskGroupV2OriginEnum.OTHER, + origin=TaskGroupOriginEnum.OTHER, pkg_name=task_collect.label, user_id=user.id, user_group_id=user_group_id, version=task_collect.version, resource_id=resource_id, ) - TaskGroupCreateV2(**task_group_attrs) + TaskGroupCreate(**task_group_attrs) # Verify non-duplication constraints await _verify_non_duplication_user_constraint( diff --git a/fractal_server/app/routes/api/v2/task_collection_pixi.py b/fractal_server/app/routes/api/v2/task_collection_pixi.py index a13ba71848..3a176ae430 100644 --- a/fractal_server/app/routes/api/v2/task_collection_pixi.py +++ b/fractal_server/app/routes/api/v2/task_collection_pixi.py @@ -33,10 +33,10 @@ ) from fractal_server.app.schemas.v2 import FractalUploadedFile from fractal_server.app.schemas.v2 import ResourceType -from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2 -from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2 -from fractal_server.app.schemas.v2 import TaskGroupActivityV2Read -from fractal_server.app.schemas.v2.task_group import TaskGroupV2OriginEnum +from 
fractal_server.app.schemas.v2 import TaskGroupActivityAction +from fractal_server.app.schemas.v2 import TaskGroupActivityRead +from fractal_server.app.schemas.v2 import TaskGroupActivityStatus +from fractal_server.app.schemas.v2.task_group import TaskGroupOriginEnum from fractal_server.logger import set_logger from fractal_server.tasks.v2.local import collect_local_pixi from fractal_server.tasks.v2.ssh import collect_ssh_pixi @@ -74,7 +74,7 @@ def validate_pkgname_and_version(filename: str) -> tuple[str, str]: @router.post( "/collect/pixi/", status_code=202, - response_model=TaskGroupActivityV2Read, + response_model=TaskGroupActivityRead, ) async def collect_task_pixi( response: Response, @@ -85,7 +85,7 @@ async def collect_task_pixi( user_group_id: int | None = None, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> TaskGroupActivityV2Read: +) -> TaskGroupActivityRead: # Get validated resource and profile resource, profile = await validate_user_profile(user=user, db=db) resource_id = resource.id @@ -136,7 +136,7 @@ async def collect_task_pixi( user_id=user.id, user_group_id=user_group_id, resource_id=resource_id, - origin=TaskGroupV2OriginEnum.PIXI, + origin=TaskGroupOriginEnum.PIXI, pixi_version=pixi_version, pkg_name=pkg_name, version=version, @@ -178,8 +178,8 @@ async def collect_task_pixi( task_group_activity = TaskGroupActivityV2( user_id=task_group.user_id, taskgroupv2_id=task_group.id, - status=TaskGroupActivityStatusV2.PENDING, - action=TaskGroupActivityActionV2.COLLECT, + status=TaskGroupActivityStatus.PENDING, + action=TaskGroupActivityAction.COLLECT, pkg_name=task_group.pkg_name, version=task_group.version, ) diff --git a/fractal_server/app/routes/api/v2/task_group.py b/fractal_server/app/routes/api/v2/task_group.py index 7d92b9dfa9..b856924f08 100644 --- a/fractal_server/app/routes/api/v2/task_group.py +++ b/fractal_server/app/routes/api/v2/task_group.py @@ -24,11 +24,11 @@ from fractal_server.app.routes.auth._aux_auth import ( _verify_user_belongs_to_group, ) -from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2 -from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2 -from fractal_server.app.schemas.v2 import TaskGroupActivityV2Read -from fractal_server.app.schemas.v2 import TaskGroupReadV2 -from fractal_server.app.schemas.v2 import TaskGroupUpdateV2 +from fractal_server.app.schemas.v2 import TaskGroupActivityAction +from fractal_server.app.schemas.v2 import TaskGroupActivityRead +from fractal_server.app.schemas.v2 import TaskGroupActivityStatus +from fractal_server.app.schemas.v2 import TaskGroupRead +from fractal_server.app.schemas.v2 import TaskGroupUpdate from fractal_server.logger import set_logger from ._aux_functions import _get_user_resource_id @@ -62,17 +62,17 @@ def _version_sort_key( return (1, task_group.version) -@router.get("/activity/", response_model=list[TaskGroupActivityV2Read]) +@router.get("/activity/", response_model=list[TaskGroupActivityRead]) async def get_task_group_activity_list( task_group_activity_id: int | None = None, taskgroupv2_id: int | None = None, pkg_name: str | None = None, - status: TaskGroupActivityStatusV2 | None = None, - action: TaskGroupActivityActionV2 | None = None, + status: TaskGroupActivityStatus | None = None, + action: TaskGroupActivityAction | None = None, timestamp_started_min: AwareDatetime | None = None, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> list[TaskGroupActivityV2Read]: +) -> 
list[TaskGroupActivityRead]: stm = select(TaskGroupActivityV2).where( TaskGroupActivityV2.user_id == user.id ) @@ -98,13 +98,13 @@ async def get_task_group_activity_list( @router.get( "/activity/{task_group_activity_id}/", - response_model=TaskGroupActivityV2Read, + response_model=TaskGroupActivityRead, ) async def get_task_group_activity( task_group_activity_id: int, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> TaskGroupActivityV2Read: +) -> TaskGroupActivityRead: activity = await db.get(TaskGroupActivityV2, task_group_activity_id) if activity is None: @@ -124,14 +124,14 @@ async def get_task_group_activity( return activity -@router.get("/", response_model=list[tuple[str, list[TaskGroupReadV2]]]) +@router.get("/", response_model=list[tuple[str, list[TaskGroupRead]]]) async def get_task_group_list( user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), only_active: bool = False, only_owner: bool = False, args_schema: bool = True, -) -> list[tuple[str, list[TaskGroupReadV2]]]: +) -> list[tuple[str, list[TaskGroupRead]]]: """ Get all accessible TaskGroups """ @@ -190,12 +190,12 @@ async def get_task_group_list( return grouped_result -@router.get("/{task_group_id}/", response_model=TaskGroupReadV2) +@router.get("/{task_group_id}/", response_model=TaskGroupRead) async def get_task_group( task_group_id: int, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> TaskGroupReadV2: +) -> TaskGroupRead: """ Get single TaskGroup """ @@ -207,13 +207,13 @@ async def get_task_group( return task_group -@router.patch("/{task_group_id}/", response_model=TaskGroupReadV2) +@router.patch("/{task_group_id}/", response_model=TaskGroupRead) async def patch_task_group( task_group_id: int, - task_group_update: TaskGroupUpdateV2, + task_group_update: TaskGroupUpdate, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> TaskGroupReadV2: +) -> TaskGroupRead: """ Patch single TaskGroup """ diff --git a/fractal_server/app/routes/api/v2/task_group_lifecycle.py b/fractal_server/app/routes/api/v2/task_group_lifecycle.py index 57e551eed0..7f82080874 100644 --- a/fractal_server/app/routes/api/v2/task_group_lifecycle.py +++ b/fractal_server/app/routes/api/v2/task_group_lifecycle.py @@ -14,11 +14,11 @@ validate_user_profile, ) from fractal_server.app.schemas.v2 import ResourceType -from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2 -from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2 -from fractal_server.app.schemas.v2 import TaskGroupActivityV2Read -from fractal_server.app.schemas.v2 import TaskGroupReadV2 -from fractal_server.app.schemas.v2 import TaskGroupV2OriginEnum +from fractal_server.app.schemas.v2 import TaskGroupActivityAction +from fractal_server.app.schemas.v2 import TaskGroupActivityRead +from fractal_server.app.schemas.v2 import TaskGroupActivityStatus +from fractal_server.app.schemas.v2 import TaskGroupOriginEnum +from fractal_server.app.schemas.v2 import TaskGroupRead from fractal_server.logger import set_logger from fractal_server.tasks.v2.local import deactivate_local from fractal_server.tasks.v2.local import deactivate_local_pixi @@ -45,7 +45,7 @@ @router.post( "/{task_group_id}/deactivate/", - response_model=TaskGroupActivityV2Read, + response_model=TaskGroupActivityRead, ) async def deactivate_task_group( task_group_id: int, @@ -53,7 +53,7 @@ async def deactivate_task_group( 
response: Response, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> TaskGroupActivityV2Read: +) -> TaskGroupActivityRead: """ Deactivate task-group venv """ @@ -84,13 +84,13 @@ async def deactivate_task_group( ) # Shortcut for task-group with origin="other" - if task_group.origin == TaskGroupV2OriginEnum.OTHER: + if task_group.origin == TaskGroupOriginEnum.OTHER: task_group.active = False task_group_activity = TaskGroupActivityV2( user_id=task_group.user_id, taskgroupv2_id=task_group.id, - status=TaskGroupActivityStatusV2.OK, - action=TaskGroupActivityActionV2.DEACTIVATE, + status=TaskGroupActivityStatus.OK, + action=TaskGroupActivityAction.DEACTIVATE, pkg_name=task_group.pkg_name, version=(task_group.version or "N/A"), log=( @@ -109,8 +109,8 @@ async def deactivate_task_group( task_group_activity = TaskGroupActivityV2( user_id=task_group.user_id, taskgroupv2_id=task_group.id, - status=TaskGroupActivityStatusV2.PENDING, - action=TaskGroupActivityActionV2.DEACTIVATE, + status=TaskGroupActivityStatus.PENDING, + action=TaskGroupActivityAction.DEACTIVATE, pkg_name=task_group.pkg_name, version=task_group.version, timestamp_started=get_timestamp(), @@ -122,12 +122,12 @@ async def deactivate_task_group( # Submit background task if resource.type == ResourceType.SLURM_SSH: - if task_group.origin == TaskGroupV2OriginEnum.PIXI: + if task_group.origin == TaskGroupOriginEnum.PIXI: deactivate_function = deactivate_ssh_pixi else: deactivate_function = deactivate_ssh else: - if task_group.origin == TaskGroupV2OriginEnum.PIXI: + if task_group.origin == TaskGroupOriginEnum.PIXI: deactivate_function = deactivate_local_pixi else: deactivate_function = deactivate_local @@ -149,7 +149,7 @@ async def deactivate_task_group( @router.post( "/{task_group_id}/reactivate/", - response_model=TaskGroupActivityV2Read, + response_model=TaskGroupActivityRead, ) async def reactivate_task_group( task_group_id: int, @@ -157,7 +157,7 @@ async def reactivate_task_group( response: Response, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> TaskGroupReadV2: +) -> TaskGroupActivityRead: """ - Deactivate task-group venv + Reactivate task-group venv """ @@ -187,13 +187,13 @@ async def reactivate_task_group( await check_no_submitted_job(task_group_id=task_group.id, db=db) # Shortcut for task-group with origin="other" - if task_group.origin == TaskGroupV2OriginEnum.OTHER: + if task_group.origin == TaskGroupOriginEnum.OTHER: task_group.active = True task_group_activity = TaskGroupActivityV2( user_id=task_group.user_id, taskgroupv2_id=task_group.id, - status=TaskGroupActivityStatusV2.OK, - action=TaskGroupActivityActionV2.REACTIVATE, + status=TaskGroupActivityStatus.OK, + action=TaskGroupActivityAction.REACTIVATE, pkg_name=task_group.pkg_name, version=(task_group.version or "N/A"), log=( @@ -220,8 +220,8 @@ async def reactivate_task_group( task_group_activity = TaskGroupActivityV2( user_id=task_group.user_id, taskgroupv2_id=task_group.id, - status=TaskGroupActivityStatusV2.PENDING, - action=TaskGroupActivityActionV2.REACTIVATE, + status=TaskGroupActivityStatus.PENDING, + action=TaskGroupActivityAction.REACTIVATE, pkg_name=task_group.pkg_name, version=task_group.version, timestamp_started=get_timestamp(), @@ -231,12 +231,12 @@ async def reactivate_task_group( # Submit background task if resource.type == ResourceType.SLURM_SSH: - if task_group.origin == TaskGroupV2OriginEnum.PIXI: + if task_group.origin == TaskGroupOriginEnum.PIXI: reactivate_function = reactivate_ssh_pixi
else: reactivate_function = reactivate_ssh else: - if task_group.origin == TaskGroupV2OriginEnum.PIXI: + if task_group.origin == TaskGroupOriginEnum.PIXI: reactivate_function = reactivate_local_pixi else: reactivate_function = reactivate_local @@ -265,7 +265,7 @@ async def delete_task_group( response: Response, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> TaskGroupActivityV2Read: +) -> TaskGroupActivityRead: """ Deletion of task-group from db and file system """ @@ -283,8 +283,8 @@ async def delete_task_group( task_group_activity = TaskGroupActivityV2( user_id=task_group.user_id, taskgroupv2_id=task_group.id, - status=TaskGroupActivityStatusV2.PENDING, - action=TaskGroupActivityActionV2.DELETE, + status=TaskGroupActivityStatus.PENDING, + action=TaskGroupActivityAction.DELETE, pkg_name=task_group.pkg_name, version=(task_group.version or "N/A"), timestamp_started=get_timestamp(), diff --git a/fractal_server/app/routes/api/v2/task_version_update.py b/fractal_server/app/routes/api/v2/task_version_update.py index dbe2aabfca..bc008c391b 100644 --- a/fractal_server/app/routes/api/v2/task_version_update.py +++ b/fractal_server/app/routes/api/v2/task_version_update.py @@ -19,8 +19,8 @@ from fractal_server.app.models.v2 import TaskV2 from fractal_server.app.routes.auth import current_user_act_ver_prof from fractal_server.app.schemas.v2 import TaskType -from fractal_server.app.schemas.v2 import WorkflowTaskReadV2 -from fractal_server.app.schemas.v2 import WorkflowTaskReplaceV2 +from fractal_server.app.schemas.v2 import WorkflowTaskRead +from fractal_server.app.schemas.v2 import WorkflowTaskReplace from fractal_server.app.schemas.v2.sharing import ProjectPermissions from ._aux_functions import _get_workflow_check_access @@ -171,7 +171,7 @@ async def get_workflow_version_update_candidates( @router.post( "/project/{project_id}/workflow/{workflow_id}/wftask/replace-task/", - response_model=WorkflowTaskReadV2, + response_model=WorkflowTaskRead, status_code=status.HTTP_201_CREATED, ) async def replace_workflowtask( @@ -179,10 +179,10 @@ async def replace_workflowtask( workflow_id: int, workflow_task_id: int, task_id: int, - replace: WorkflowTaskReplaceV2, + replace: WorkflowTaskReplace, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> WorkflowTaskReadV2: +) -> WorkflowTaskRead: # Get objects from database workflow_task, workflow = await _get_workflow_task_check_access( project_id=project_id, diff --git a/fractal_server/app/routes/api/v2/workflow.py b/fractal_server/app/routes/api/v2/workflow.py index c244e18d9c..217a1c6323 100644 --- a/fractal_server/app/routes/api/v2/workflow.py +++ b/fractal_server/app/routes/api/v2/workflow.py @@ -15,11 +15,11 @@ from fractal_server.app.models.v2 import TaskGroupV2 from fractal_server.app.models.v2 import WorkflowV2 from fractal_server.app.routes.auth import current_user_act_ver_prof -from fractal_server.app.schemas.v2 import WorkflowCreateV2 -from fractal_server.app.schemas.v2 import WorkflowExportV2 -from fractal_server.app.schemas.v2 import WorkflowReadV2 -from fractal_server.app.schemas.v2 import WorkflowReadV2WithWarnings -from fractal_server.app.schemas.v2 import WorkflowUpdateV2 +from fractal_server.app.schemas.v2 import WorkflowCreate +from fractal_server.app.schemas.v2 import WorkflowExport +from fractal_server.app.schemas.v2 import WorkflowRead +from fractal_server.app.schemas.v2 import WorkflowReadWithWarnings +from fractal_server.app.schemas.v2 
import WorkflowUpdate from fractal_server.app.schemas.v2.sharing import ProjectPermissions from fractal_server.images.tools import merge_type_filters @@ -35,13 +35,13 @@ @router.get( "/project/{project_id}/workflow/", - response_model=list[WorkflowReadV2], + response_model=list[WorkflowRead], ) async def get_workflow_list( project_id: int, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> list[WorkflowReadV2] | None: +) -> list[WorkflowRead] | None: """ Get workflow list for given project """ @@ -63,15 +63,15 @@ async def get_workflow_list( @router.post( "/project/{project_id}/workflow/", - response_model=WorkflowReadV2, + response_model=WorkflowRead, status_code=status.HTTP_201_CREATED, ) async def create_workflow( project_id: int, - workflow: WorkflowCreateV2, + workflow: WorkflowCreate, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> WorkflowReadV2 | None: +) -> WorkflowRead | None: """ Create a workflow, associate to a project """ @@ -95,14 +95,14 @@ async def create_workflow( @router.get( "/project/{project_id}/workflow/{workflow_id}/", - response_model=WorkflowReadV2WithWarnings, + response_model=WorkflowReadWithWarnings, ) async def read_workflow( project_id: int, workflow_id: int, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> WorkflowReadV2WithWarnings | None: +) -> WorkflowReadWithWarnings | None: """ Get info on an existing workflow """ @@ -129,15 +129,15 @@ async def read_workflow( @router.patch( "/project/{project_id}/workflow/{workflow_id}/", - response_model=WorkflowReadV2WithWarnings, + response_model=WorkflowReadWithWarnings, ) async def update_workflow( project_id: int, workflow_id: int, - patch: WorkflowUpdateV2, + patch: WorkflowUpdate, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> WorkflowReadV2WithWarnings | None: +) -> WorkflowReadWithWarnings | None: """ Edit a workflow """ @@ -251,14 +251,14 @@ async def delete_workflow( @router.get( "/project/{project_id}/workflow/{workflow_id}/export/", - response_model=WorkflowExportV2, + response_model=WorkflowExport, ) async def export_workflow( project_id: int, workflow_id: int, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> WorkflowExportV2 | None: +) -> WorkflowExport | None: """ Export an existing workflow, after stripping all IDs """ @@ -279,7 +279,7 @@ async def export_workflow( name=wftask.task.name, ) - wf = WorkflowExportV2( + wf = WorkflowExport( **workflow.model_dump(), task_list=wf_task_list, ) diff --git a/fractal_server/app/routes/api/v2/workflow_import.py b/fractal_server/app/routes/api/v2/workflow_import.py index 613bea70c1..3f80bd1f2f 100644 --- a/fractal_server/app/routes/api/v2/workflow_import.py +++ b/fractal_server/app/routes/api/v2/workflow_import.py @@ -19,11 +19,11 @@ from fractal_server.app.routes.auth._aux_auth import ( _get_default_usergroup_id_or_none, ) -from fractal_server.app.schemas.v2 import TaskImportV2 -from fractal_server.app.schemas.v2 import TaskImportV2Legacy -from fractal_server.app.schemas.v2 import WorkflowImportV2 -from fractal_server.app.schemas.v2 import WorkflowReadV2WithWarnings -from fractal_server.app.schemas.v2 import WorkflowTaskCreateV2 +from fractal_server.app.schemas.v2 import TaskImport +from fractal_server.app.schemas.v2 import TaskImportLegacy +from fractal_server.app.schemas.v2 import WorkflowImport 
+from fractal_server.app.schemas.v2 import WorkflowReadWithWarnings +from fractal_server.app.schemas.v2 import WorkflowTaskCreate from fractal_server.app.schemas.v2.sharing import ProjectPermissions from fractal_server.logger import set_logger @@ -101,7 +101,7 @@ async def _get_task_by_source( async def _get_task_by_taskimport( *, - task_import: TaskImportV2, + task_import: TaskImport, task_groups_list: list[TaskGroupV2], user_id: int, default_group_id: int | None, @@ -207,15 +207,15 @@ async def _get_task_by_taskimport( @router.post( "/project/{project_id}/workflow/import/", - response_model=WorkflowReadV2WithWarnings, + response_model=WorkflowReadWithWarnings, status_code=status.HTTP_201_CREATED, ) async def import_workflow( project_id: int, - workflow_import: WorkflowImportV2, + workflow_import: WorkflowImport, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> WorkflowReadV2WithWarnings: +) -> WorkflowReadWithWarnings: """ Import an existing workflow into a project and create required objects. """ @@ -246,7 +246,7 @@ async def import_workflow( list_task_ids = [] for wf_task in workflow_import.task_list: task_import = wf_task.task - if isinstance(task_import, TaskImportV2Legacy): + if isinstance(task_import, TaskImportLegacy): task_id = await _get_task_by_source( source=task_import.source, task_groups_list=task_group_list, @@ -264,7 +264,7 @@ async def import_workflow( status_code=status.HTTP_422_UNPROCESSABLE_CONTENT, detail=f"Could not find a task matching with {wf_task.task}.", ) - new_wf_task = WorkflowTaskCreateV2( + new_wf_task = WorkflowTaskCreate( **wf_task.model_dump(exclude_none=True, exclude={"task"}) ) list_wf_tasks.append(new_wf_task) diff --git a/fractal_server/app/routes/api/v2/workflowtask.py b/fractal_server/app/routes/api/v2/workflowtask.py index 5856ed2df2..670e5665c9 100644 --- a/fractal_server/app/routes/api/v2/workflowtask.py +++ b/fractal_server/app/routes/api/v2/workflowtask.py @@ -11,9 +11,9 @@ from fractal_server.app.models import UserOAuth from fractal_server.app.routes.auth import current_user_act_ver_prof from fractal_server.app.schemas.v2 import TaskType -from fractal_server.app.schemas.v2 import WorkflowTaskCreateV2 -from fractal_server.app.schemas.v2 import WorkflowTaskReadV2 -from fractal_server.app.schemas.v2 import WorkflowTaskUpdateV2 +from fractal_server.app.schemas.v2 import WorkflowTaskCreate +from fractal_server.app.schemas.v2 import WorkflowTaskRead +from fractal_server.app.schemas.v2 import WorkflowTaskUpdate from fractal_server.app.schemas.v2.sharing import ProjectPermissions from ._aux_functions import _get_workflow_check_access @@ -28,17 +28,17 @@ @router.post( "/project/{project_id}/workflow/{workflow_id}/wftask/", - response_model=WorkflowTaskReadV2, + response_model=WorkflowTaskRead, status_code=status.HTTP_201_CREATED, ) async def create_workflowtask( project_id: int, workflow_id: int, task_id: int, - wftask: WorkflowTaskCreateV2, + wftask: WorkflowTaskCreate, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> WorkflowTaskReadV2 | None: +) -> WorkflowTaskRead | None: """ Add a WorkflowTask to a Workflow """ @@ -100,7 +100,7 @@ async def create_workflowtask( @router.get( "/project/{project_id}/workflow/{workflow_id}/wftask/{workflow_task_id}/", - response_model=WorkflowTaskReadV2, + response_model=WorkflowTaskRead, ) async def read_workflowtask( project_id: int, @@ -122,16 +122,16 @@ async def read_workflowtask( @router.patch( 
"/project/{project_id}/workflow/{workflow_id}/wftask/{workflow_task_id}/", - response_model=WorkflowTaskReadV2, + response_model=WorkflowTaskRead, ) async def update_workflowtask( project_id: int, workflow_id: int, workflow_task_id: int, - workflow_task_update: WorkflowTaskUpdateV2, + workflow_task_update: WorkflowTaskUpdate, user: UserOAuth = Depends(current_user_act_ver_prof), db: AsyncSession = Depends(get_async_db), -) -> WorkflowTaskReadV2 | None: +) -> WorkflowTaskRead | None: """ Edit a WorkflowTask of a Workflow """ diff --git a/fractal_server/app/schemas/v2/__init__.py b/fractal_server/app/schemas/v2/__init__.py index d8997fed0c..47d90d9d9a 100644 --- a/fractal_server/app/schemas/v2/__init__.py +++ b/fractal_server/app/schemas/v2/__init__.py @@ -1,35 +1,35 @@ from .accounting import AccountingRecordRead # noqa F401 -from .dataset import DatasetCreateV2 # noqa F401 -from .dataset import DatasetExportV2 # noqa F401 -from .dataset import DatasetImportV2 # noqa F401 -from .dataset import DatasetReadV2 # noqa F401 -from .dataset import DatasetUpdateV2 # noqa F401 -from .dumps import DatasetDumpV2 # noqa F401 -from .dumps import ProjectDumpV2 # noqa F401 -from .dumps import TaskDumpV2 # noqa F401 -from .dumps import TaskGroupDumpV2 # noqa F401 -from .dumps import WorkflowDumpV2 # noqa F401 -from .dumps import WorkflowTaskDumpV2 # noqa F401 +from .dataset import DatasetCreate # noqa F401 +from .dataset import DatasetExport # noqa F401 +from .dataset import DatasetImport # noqa F401 +from .dataset import DatasetRead # noqa F401 +from .dataset import DatasetUpdate # noqa F401 +from .dumps import DatasetDump # noqa F401 +from .dumps import ProjectDump # noqa F401 +from .dumps import TaskDump # noqa F401 +from .dumps import TaskGroupDump # noqa F401 +from .dumps import WorkflowDump # noqa F401 +from .dumps import WorkflowTaskDump # noqa F401 from .history import HistoryRunRead # noqa F401 from .history import HistoryRunReadAggregated # noqa F401 from .history import HistoryUnitRead # noqa F401 from .history import HistoryUnitStatus # noqa F401 from .history import HistoryUnitStatusWithUnset # noqa F401 from .history import ImageLogsRequest # noqa F401 -from .job import JobCreateV2 # noqa F401 -from .job import JobReadV2 # noqa F401 -from .job import JobStatusTypeV2 # noqa F401 -from .job import JobUpdateV2 # noqa F401 -from .manifest import ManifestV2 # noqa F401 -from .manifest import TaskManifestV2 # noqa F401 +from .job import JobCreate # noqa F401 +from .job import JobRead # noqa F401 +from .job import JobStatusType # noqa F401 +from .job import JobUpdate # noqa F401 +from .manifest import Manifest # noqa F401 +from .manifest import TaskManifest # noqa F401 from .profile import ProfileCreate # noqa F401 from .profile import ProfileRead # noqa F401 from .profile import ValidProfileLocal # noqa F401 from .profile import ValidProfileSlurmSSH # noqa F401 from .profile import ValidProfileSlurmSudo # noqa F401 -from .project import ProjectCreateV2 # noqa F401 -from .project import ProjectReadV2 # noqa F401 -from .project import ProjectUpdateV2 # noqa F401 +from .project import ProjectCreate # noqa F401 +from .project import ProjectRead # noqa F401 +from .project import ProjectUpdate # noqa F401 from .sharing import ProjectPermissions # noqa F401 from .sharing import ProjectGuestCreate # noqa F401 from .sharing import ProjectAccessRead # noqa F401 @@ -43,36 +43,36 @@ from .resource import ValidResourceLocal # noqa F401 from .resource import ValidResourceSlurmSSH # noqa F401 from .resource 
import ValidResourceSlurmSudo # noqa F401 -from .status_legacy import WorkflowTaskStatusTypeV2 # noqa F401 -from .task import TaskCreateV2 # noqa F401 -from .task import TaskExportV2 # noqa F401 -from .task import TaskImportV2 # noqa F401 -from .task import TaskImportV2Legacy # noqa F401 -from .task import TaskReadV2 # noqa F401 +from .status_legacy import WorkflowTaskStatusType # noqa F401 +from .task import TaskCreate # noqa F401 +from .task import TaskExport # noqa F401 +from .task import TaskImport # noqa F401 +from .task import TaskImportLegacy # noqa F401 +from .task import TaskRead # noqa F401 from .task import TaskType # noqa F401 -from .task import TaskUpdateV2 # noqa F401 +from .task import TaskUpdate # noqa F401 from .task_collection import FractalUploadedFile # noqa F401 -from .task_collection import TaskCollectCustomV2 # noqa F401 -from .task_collection import TaskCollectPipV2 # noqa F401 -from .task_group import TaskGroupActivityActionV2 # noqa F401 -from .task_group import TaskGroupActivityStatusV2 # noqa F401 -from .task_group import TaskGroupActivityV2Read # noqa F401 -from .task_group import TaskGroupCreateV2 # noqa F401 -from .task_group import TaskGroupCreateV2Strict # noqa F401 +from .task_collection import TaskCollectCustom # noqa F401 +from .task_collection import TaskCollectPip # noqa F401 +from .task_group import TaskGroupActivityAction # noqa F401 +from .task_group import TaskGroupActivityStatus # noqa F401 +from .task_group import TaskGroupActivityRead # noqa F401 +from .task_group import TaskGroupCreate # noqa F401 +from .task_group import TaskGroupCreateStrict # noqa F401 from .task_group import TaskGroupReadSuperuser # noqa F401 -from .task_group import TaskGroupReadV2 # noqa F401 -from .task_group import TaskGroupUpdateV2 # noqa F401 -from .task_group import TaskGroupV2OriginEnum # noqa F401 -from .workflow import WorkflowCreateV2 # noqa F401 -from .workflow import WorkflowExportV2 # noqa F401 -from .workflow import WorkflowImportV2 # noqa F401 -from .workflow import WorkflowReadV2 # noqa F401 -from .workflow import WorkflowReadV2WithWarnings # noqa F401 -from .workflow import WorkflowUpdateV2 # noqa F401 -from .workflowtask import WorkflowTaskCreateV2 # noqa F401 -from .workflowtask import WorkflowTaskExportV2 # noqa F401 -from .workflowtask import WorkflowTaskImportV2 # noqa F401 -from .workflowtask import WorkflowTaskReadV2 # noqa F401 -from .workflowtask import WorkflowTaskReadV2WithWarning # noqa F401 -from .workflowtask import WorkflowTaskReplaceV2 # noqa F401 -from .workflowtask import WorkflowTaskUpdateV2 # noqa F401 +from .task_group import TaskGroupRead # noqa F401 +from .task_group import TaskGroupUpdate # noqa F401 +from .task_group import TaskGroupOriginEnum # noqa F401 +from .workflow import WorkflowCreate # noqa F401 +from .workflow import WorkflowExport # noqa F401 +from .workflow import WorkflowImport # noqa F401 +from .workflow import WorkflowRead # noqa F401 +from .workflow import WorkflowReadWithWarnings # noqa F401 +from .workflow import WorkflowUpdate # noqa F401 +from .workflowtask import WorkflowTaskCreate # noqa F401 +from .workflowtask import WorkflowTaskExport # noqa F401 +from .workflowtask import WorkflowTaskImport # noqa F401 +from .workflowtask import WorkflowTaskRead # noqa F401 +from .workflowtask import WorkflowTaskReadWithWarning # noqa F401 +from .workflowtask import WorkflowTaskReplace # noqa F401 +from .workflowtask import WorkflowTaskUpdate # noqa F401 diff --git a/fractal_server/app/schemas/v2/dataset.py 
b/fractal_server/app/schemas/v2/dataset.py index 8c40873368..722cec22ba 100644 --- a/fractal_server/app/schemas/v2/dataset.py +++ b/fractal_server/app/schemas/v2/dataset.py @@ -6,15 +6,15 @@ from pydantic import field_serializer from pydantic.types import AwareDatetime -from fractal_server.app.schemas.v2.project import ProjectReadV2 +from fractal_server.app.schemas.v2.project import ProjectRead from fractal_server.images import SingleImage from fractal_server.types import NonEmptyStr from fractal_server.types import ZarrDirStr -class DatasetCreateV2(BaseModel): +class DatasetCreate(BaseModel): """ - DatasetCreateV2 + DatasetCreate Attributes: name: @@ -27,9 +27,9 @@ class DatasetCreateV2(BaseModel): zarr_dir: ZarrDirStr | None = None -class DatasetReadV2(BaseModel): +class DatasetRead(BaseModel): """ - DatasetReadV2 + DatasetRead Attributes: id: @@ -44,7 +44,7 @@ class DatasetReadV2(BaseModel): name: str project_id: int - project: ProjectReadV2 + project: ProjectRead timestamp_created: AwareDatetime @@ -55,9 +55,9 @@ def serialize_datetime(v: datetime) -> str: return v.isoformat() -class DatasetUpdateV2(BaseModel): +class DatasetUpdate(BaseModel): """ - DatasetUpdateV2 + DatasetUpdate Attributes: name: @@ -70,7 +70,7 @@ class DatasetUpdateV2(BaseModel): zarr_dir: ZarrDirStr | None = None -class DatasetImportV2(BaseModel): +class DatasetImport(BaseModel): """ Class for `Dataset` import. @@ -89,7 +89,7 @@ class DatasetImportV2(BaseModel): images: list[SingleImage] = Field(default_factory=list) -class DatasetExportV2(BaseModel): +class DatasetExport(BaseModel): """ Class for `Dataset` export. diff --git a/fractal_server/app/schemas/v2/dumps.py b/fractal_server/app/schemas/v2/dumps.py index 19c9e0240b..8e335b7c35 100644 --- a/fractal_server/app/schemas/v2/dumps.py +++ b/fractal_server/app/schemas/v2/dumps.py @@ -13,17 +13,17 @@ from pydantic import Field from .task import TaskType -from .task_group import TaskGroupV2OriginEnum +from .task_group import TaskGroupOriginEnum -class ProjectDumpV2(BaseModel): +class ProjectDump(BaseModel): model_config = ConfigDict(extra="forbid") id: int name: str timestamp_created: str -class TaskDumpV2(BaseModel): +class TaskDump(BaseModel): id: int name: str type: TaskType @@ -37,7 +37,7 @@ class TaskDumpV2(BaseModel): output_types: dict[str, bool] -class WorkflowTaskDumpV2(BaseModel): +class WorkflowTaskDump(BaseModel): """ We do not include 'model_config = ConfigDict(extra="forbid")' because legacy data may include 'input_filters' field and we want to avoid @@ -51,10 +51,10 @@ class WorkflowTaskDumpV2(BaseModel): type_filters: dict[str, bool] task_id: int | None = None - task: TaskDumpV2 | None = None + task: TaskDump | None = None -class WorkflowDumpV2(BaseModel): +class WorkflowDump(BaseModel): model_config = ConfigDict(extra="forbid") id: int name: str @@ -62,7 +62,7 @@ class WorkflowDumpV2(BaseModel): timestamp_created: str -class DatasetDumpV2(BaseModel): +class DatasetDump(BaseModel): """ We do not include 'model_config = ConfigDict(extra="forbid")' because legacy data may include 'type_filters' or 'attribute_filters' and we @@ -76,9 +76,9 @@ class DatasetDumpV2(BaseModel): zarr_dir: str -class TaskGroupDumpV2(BaseModel): +class TaskGroupDump(BaseModel): id: int - origin: TaskGroupV2OriginEnum + origin: TaskGroupOriginEnum pkg_name: str version: str | None = None python_version: str | None = None diff --git a/fractal_server/app/schemas/v2/job.py b/fractal_server/app/schemas/v2/job.py index 1abc9a1eb4..7b8aaa7389 100644 --- 
a/fractal_server/app/schemas/v2/job.py +++ b/fractal_server/app/schemas/v2/job.py @@ -10,15 +10,15 @@ from pydantic.types import NonNegativeInt from pydantic.types import StrictStr -from fractal_server.app.schemas.v2.dumps import DatasetDumpV2 -from fractal_server.app.schemas.v2.dumps import ProjectDumpV2 -from fractal_server.app.schemas.v2.dumps import WorkflowDumpV2 +from fractal_server.app.schemas.v2.dumps import DatasetDump +from fractal_server.app.schemas.v2.dumps import ProjectDump +from fractal_server.app.schemas.v2.dumps import WorkflowDump from fractal_server.types import AttributeFilters from fractal_server.types import NonEmptyStr from fractal_server.types import TypeFilters -class JobStatusTypeV2(StrEnum): +class JobStatusType(StrEnum): """ Define the available job statuses @@ -39,7 +39,7 @@ class JobStatusTypeV2(StrEnum): FAILED = "failed" -class JobCreateV2(BaseModel): +class JobCreate(BaseModel): model_config = ConfigDict(extra="forbid") first_task_index: NonNegativeInt | None = None @@ -65,16 +65,16 @@ def validate_first_last_indices(cls, values): return values -class JobReadV2(BaseModel): +class JobRead(BaseModel): id: int project_id: int | None = None - project_dump: ProjectDumpV2 + project_dump: ProjectDump user_email: str slurm_account: str | None = None workflow_id: int | None = None - workflow_dump: WorkflowDumpV2 + workflow_dump: WorkflowDump dataset_id: int | None = None - dataset_dump: DatasetDumpV2 + dataset_dump: DatasetDump start_timestamp: AwareDatetime end_timestamp: AwareDatetime | None = None status: str @@ -100,7 +100,7 @@ def serialize_datetime_end(v: datetime | None) -> str | None: return v.isoformat() -class JobUpdateV2(BaseModel): +class JobUpdate(BaseModel): model_config = ConfigDict(extra="forbid") - status: JobStatusTypeV2 + status: JobStatusType diff --git a/fractal_server/app/schemas/v2/manifest.py b/fractal_server/app/schemas/v2/manifest.py index 73653c764b..abea66f921 100644 --- a/fractal_server/app/schemas/v2/manifest.py +++ b/fractal_server/app/schemas/v2/manifest.py @@ -11,9 +11,9 @@ from .task import TaskType -class TaskManifestV2(BaseModel): +class TaskManifest(BaseModel): """ - Represents a task within a V2 manifest. + Represents a task within a manifest. Attributes: name: @@ -65,24 +65,24 @@ def validate_executable_args_meta(self): executable_parallel = self.executable_parallel if (executable_non_parallel is None) and (executable_parallel is None): raise ValueError( - "`TaskManifestV2.executable_non_parallel` and " - "`TaskManifestV2.executable_parallel` cannot be both None." + "`TaskManifest.executable_non_parallel` and " + "`TaskManifest.executable_parallel` cannot be both None." ) elif executable_non_parallel is None: meta_non_parallel = self.meta_non_parallel if meta_non_parallel != {}: raise ValueError( - "`TaskManifestV2.meta_non_parallel` must be an empty dict " - "if `TaskManifestV2.executable_non_parallel` is None. " + "`TaskManifest.meta_non_parallel` must be an empty dict " + "if `TaskManifest.executable_non_parallel` is None. " f"Given: {meta_non_parallel}." ) args_schema_non_parallel = self.args_schema_non_parallel if args_schema_non_parallel is not None: raise ValueError( - "`TaskManifestV2.args_schema_non_parallel` must be None " - "if `TaskManifestV2.executable_non_parallel` is None. " + "`TaskManifest.args_schema_non_parallel` must be None " + "if `TaskManifest.executable_non_parallel` is None. " f"Given: {args_schema_non_parallel}." 
) @@ -90,23 +90,23 @@ def validate_executable_args_meta(self): meta_parallel = self.meta_parallel if meta_parallel != {}: raise ValueError( - "`TaskManifestV2.meta_parallel` must be an empty dict if " - "`TaskManifestV2.executable_parallel` is None. " + "`TaskManifest.meta_parallel` must be an empty dict if " + "`TaskManifest.executable_parallel` is None. " f"Given: {meta_parallel}." ) args_schema_parallel = self.args_schema_parallel if args_schema_parallel is not None: raise ValueError( - "`TaskManifestV2.args_schema_parallel` must be None if " - "`TaskManifestV2.executable_parallel` is None. " + "`TaskManifest.args_schema_parallel` must be None if " + "`TaskManifest.executable_parallel` is None. " f"Given: {args_schema_parallel}." ) return self -class ManifestV2(BaseModel): +class Manifest(BaseModel): """ Packages containing tasks are required to include a special file `__FRACTAL_MANIFEST__.json` in order to be discovered and used by Fractal. @@ -131,7 +131,7 @@ class ManifestV2(BaseModel): """ manifest_version: Literal["2"] - task_list: list[TaskManifestV2] + task_list: list[TaskManifest] has_args_schemas: bool = False args_schema_version: str | None = None authors: NonEmptyStr | None = None diff --git a/fractal_server/app/schemas/v2/project.py b/fractal_server/app/schemas/v2/project.py index d31f3bbe8e..d64a9fa349 100644 --- a/fractal_server/app/schemas/v2/project.py +++ b/fractal_server/app/schemas/v2/project.py @@ -8,13 +8,13 @@ from fractal_server.types import NonEmptyStr -class ProjectCreateV2(BaseModel): +class ProjectCreate(BaseModel): model_config = ConfigDict(extra="forbid") name: NonEmptyStr -class ProjectReadV2(BaseModel): +class ProjectRead(BaseModel): id: int name: str timestamp_created: AwareDatetime @@ -24,7 +24,7 @@ def serialize_datetime(v: datetime) -> str: return v.isoformat() -class ProjectUpdateV2(BaseModel): +class ProjectUpdate(BaseModel): model_config = ConfigDict(extra="forbid") name: NonEmptyStr = None diff --git a/fractal_server/app/schemas/v2/status_legacy.py b/fractal_server/app/schemas/v2/status_legacy.py index 8cd63e46f0..30da018953 100644 --- a/fractal_server/app/schemas/v2/status_legacy.py +++ b/fractal_server/app/schemas/v2/status_legacy.py @@ -4,7 +4,7 @@ from pydantic import Field -class WorkflowTaskStatusTypeV2(StrEnum): +class WorkflowTaskStatusType(StrEnum): """ Define the available values for the status of a `WorkflowTask`. 
@@ -23,7 +23,7 @@ class WorkflowTaskStatusTypeV2(StrEnum): FAILED = "failed" -class LegacyStatusReadV2(BaseModel): +class LegacyStatusRead(BaseModel): """ Response type for the `/project/{project_id}/status/` endpoint @@ -31,5 +31,5 @@ class LegacyStatusReadV2(BaseModel): status: dict[ str, - WorkflowTaskStatusTypeV2, + WorkflowTaskStatusType, ] = Field(default_factory=dict) diff --git a/fractal_server/app/schemas/v2/task.py b/fractal_server/app/schemas/v2/task.py index 28c2df5bdc..9c25efcf7b 100644 --- a/fractal_server/app/schemas/v2/task.py +++ b/fractal_server/app/schemas/v2/task.py @@ -29,7 +29,7 @@ class TaskType(StrEnum): PARALLEL = "parallel" -class TaskCreateV2(BaseModel): +class TaskCreate(BaseModel): model_config = ConfigDict(extra="forbid") name: NonEmptyStr @@ -90,7 +90,7 @@ def set_task_type(self): return self -class TaskReadV2(BaseModel): +class TaskRead(BaseModel): id: int name: str type: TaskType @@ -117,7 +117,7 @@ class TaskReadV2(BaseModel): tags: list[str] -class TaskUpdateV2(BaseModel): +class TaskUpdate(BaseModel): model_config = ConfigDict(extra="forbid") command_parallel: NonEmptyStr = None @@ -131,7 +131,7 @@ class TaskUpdateV2(BaseModel): tags: ListUniqueNonEmptyString | None = None -class TaskImportV2(BaseModel): +class TaskImport(BaseModel): model_config = ConfigDict(extra="forbid") pkg_name: NonEmptyStr @@ -139,11 +139,11 @@ class TaskImportV2(BaseModel): name: NonEmptyStr -class TaskImportV2Legacy(BaseModel): +class TaskImportLegacy(BaseModel): source: NonEmptyStr -class TaskExportV2(BaseModel): +class TaskExport(BaseModel): pkg_name: NonEmptyStr version: NonEmptyStr | None = None name: NonEmptyStr diff --git a/fractal_server/app/schemas/v2/task_collection.py b/fractal_server/app/schemas/v2/task_collection.py index ac2c77d2e5..bc6da20861 100644 --- a/fractal_server/app/schemas/v2/task_collection.py +++ b/fractal_server/app/schemas/v2/task_collection.py @@ -5,7 +5,7 @@ from pydantic import field_validator from pydantic import model_validator -from fractal_server.app.schemas.v2 import ManifestV2 +from fractal_server.app.schemas.v2 import Manifest from fractal_server.string_tools import validate_cmd from fractal_server.types import AbsolutePathStr from fractal_server.types import DictStrStr @@ -21,9 +21,9 @@ class FractalUploadedFile(BaseModel): contents: bytes -class TaskCollectPipV2(BaseModel): +class TaskCollectPip(BaseModel): """ - TaskCollectPipV2 class + TaskCollectPip class This class only encodes the attributes required to trigger a task-collection operation. Other attributes (that are assigned *during* @@ -91,7 +91,7 @@ def validate_pinned_package_versions(cls, value): return value -class TaskCollectCustomV2(BaseModel): +class TaskCollectCustom(BaseModel): """ Attributes: manifest: Manifest of a Fractal task package (this is typically the @@ -99,7 +99,7 @@ class TaskCollectCustomV2(BaseModel): python_interpreter: Absolute path to the Python interpreter to be used for running tasks. name: A name identifying this package, that will fill the - `TaskGroupV2.pkg_name` column. + `TaskGroup.pkg_name` column. package_root: The folder where the package is installed. If not provided, it will be extracted via `pip show` (requires `package_name` to be set). 
@@ -110,7 +110,7 @@ class TaskCollectCustomV2(BaseModel): """ model_config = ConfigDict(extra="forbid") - manifest: ManifestV2 + manifest: Manifest python_interpreter: AbsolutePathStr label: NonEmptyStr package_root: AbsolutePathStr | None = None diff --git a/fractal_server/app/schemas/v2/task_group.py b/fractal_server/app/schemas/v2/task_group.py index aced179d09..9593fcc142 100644 --- a/fractal_server/app/schemas/v2/task_group.py +++ b/fractal_server/app/schemas/v2/task_group.py @@ -7,40 +7,40 @@ from pydantic import field_serializer from pydantic.types import AwareDatetime -from fractal_server.app.schemas.v2.task import TaskReadV2 +from fractal_server.app.schemas.v2.task import TaskRead from fractal_server.types import AbsolutePathStr from fractal_server.types import DictStrStr from fractal_server.types import NonEmptyStr -class TaskGroupV2OriginEnum(StrEnum): +class TaskGroupOriginEnum(StrEnum): PYPI = "pypi" WHEELFILE = "wheel-file" PIXI = "pixi" OTHER = "other" -class TaskGroupActivityStatusV2(StrEnum): +class TaskGroupActivityStatus(StrEnum): PENDING = "pending" ONGOING = "ongoing" FAILED = "failed" OK = "OK" -class TaskGroupActivityActionV2(StrEnum): +class TaskGroupActivityAction(StrEnum): COLLECT = "collect" DEACTIVATE = "deactivate" REACTIVATE = "reactivate" DELETE = "delete" -class TaskGroupCreateV2(BaseModel): +class TaskGroupCreate(BaseModel): model_config = ConfigDict(extra="forbid") user_id: int resource_id: int user_group_id: int | None = None active: bool = True - origin: TaskGroupV2OriginEnum + origin: TaskGroupOriginEnum pkg_name: str version: str | None = None python_version: NonEmptyStr = None @@ -54,9 +54,9 @@ class TaskGroupCreateV2(BaseModel): pinned_package_versions_post: DictStrStr = Field(default_factory=dict) -class TaskGroupCreateV2Strict(TaskGroupCreateV2): +class TaskGroupCreateStrict(TaskGroupCreate): """ - A strict version of TaskGroupCreateV2, to be used for task collection. + A strict version of TaskGroupCreate, to be used for task collection. 
""" path: AbsolutePathStr @@ -65,14 +65,14 @@ class TaskGroupCreateV2Strict(TaskGroupCreateV2): python_version: NonEmptyStr -class TaskGroupReadV2(BaseModel): +class TaskGroupRead(BaseModel): id: int - task_list: list[TaskReadV2] + task_list: list[TaskRead] user_id: int user_group_id: int | None = None - origin: TaskGroupV2OriginEnum + origin: TaskGroupOriginEnum pkg_name: str version: str | None = None python_version: str | None = None @@ -96,16 +96,16 @@ def serialize_datetime(v: datetime) -> str: return v.isoformat() -class TaskGroupReadSuperuser(TaskGroupReadV2): +class TaskGroupReadSuperuser(TaskGroupRead): resource_id: int -class TaskGroupUpdateV2(BaseModel): +class TaskGroupUpdate(BaseModel): model_config = ConfigDict(extra="forbid") user_group_id: int | None = None -class TaskGroupActivityV2Read(BaseModel): +class TaskGroupActivityRead(BaseModel): id: int user_id: int taskgroupv2_id: int | None = None @@ -113,8 +113,8 @@ class TaskGroupActivityV2Read(BaseModel): timestamp_ended: AwareDatetime | None = None pkg_name: str version: str - status: TaskGroupActivityStatusV2 - action: TaskGroupActivityActionV2 + status: TaskGroupActivityStatus + action: TaskGroupActivityAction log: str | None = None @field_serializer("timestamp_started") diff --git a/fractal_server/app/schemas/v2/workflow.py b/fractal_server/app/schemas/v2/workflow.py index 1ec89cb53e..dd8cedf4a2 100644 --- a/fractal_server/app/schemas/v2/workflow.py +++ b/fractal_server/app/schemas/v2/workflow.py @@ -5,29 +5,29 @@ from pydantic import field_serializer from pydantic.types import AwareDatetime -from fractal_server.app.schemas.v2.project import ProjectReadV2 -from fractal_server.app.schemas.v2.workflowtask import WorkflowTaskExportV2 -from fractal_server.app.schemas.v2.workflowtask import WorkflowTaskImportV2 -from fractal_server.app.schemas.v2.workflowtask import WorkflowTaskReadV2 +from fractal_server.app.schemas.v2.project import ProjectRead +from fractal_server.app.schemas.v2.workflowtask import WorkflowTaskExport +from fractal_server.app.schemas.v2.workflowtask import WorkflowTaskImport +from fractal_server.app.schemas.v2.workflowtask import WorkflowTaskRead from fractal_server.app.schemas.v2.workflowtask import ( - WorkflowTaskReadV2WithWarning, + WorkflowTaskReadWithWarning, ) from fractal_server.types import ListUniqueNonNegativeInt from fractal_server.types import NonEmptyStr -class WorkflowCreateV2(BaseModel): +class WorkflowCreate(BaseModel): model_config = ConfigDict(extra="forbid") name: NonEmptyStr -class WorkflowReadV2(BaseModel): +class WorkflowRead(BaseModel): id: int name: str project_id: int - task_list: list[WorkflowTaskReadV2] - project: ProjectReadV2 + task_list: list[WorkflowTaskRead] + project: ProjectRead timestamp_created: AwareDatetime @field_serializer("timestamp_created") @@ -35,18 +35,18 @@ def serialize_datetime(v: datetime) -> str: return v.isoformat() -class WorkflowReadV2WithWarnings(WorkflowReadV2): - task_list: list[WorkflowTaskReadV2WithWarning] +class WorkflowReadWithWarnings(WorkflowRead): + task_list: list[WorkflowTaskReadWithWarning] -class WorkflowUpdateV2(BaseModel): +class WorkflowUpdate(BaseModel): model_config = ConfigDict(extra="forbid") name: NonEmptyStr = None reordered_workflowtask_ids: ListUniqueNonNegativeInt | None = None -class WorkflowImportV2(BaseModel): +class WorkflowImport(BaseModel): """ Class for `Workflow` import. 
@@ -56,10 +56,10 @@ class WorkflowImportV2(BaseModel): model_config = ConfigDict(extra="forbid") name: NonEmptyStr - task_list: list[WorkflowTaskImportV2] + task_list: list[WorkflowTaskImport] -class WorkflowExportV2(BaseModel): +class WorkflowExport(BaseModel): """ Class for `Workflow` export. @@ -68,4 +68,4 @@ class WorkflowExportV2(BaseModel): """ name: str - task_list: list[WorkflowTaskExportV2] + task_list: list[WorkflowTaskExport] diff --git a/fractal_server/app/schemas/v2/workflowtask.py b/fractal_server/app/schemas/v2/workflowtask.py index 5776dfcd04..f67ae6d5bf 100644 --- a/fractal_server/app/schemas/v2/workflowtask.py +++ b/fractal_server/app/schemas/v2/workflowtask.py @@ -9,14 +9,14 @@ from fractal_server.types import TypeFilters from fractal_server.types import WorkflowTaskArgument -from .task import TaskExportV2 -from .task import TaskImportV2 -from .task import TaskImportV2Legacy -from .task import TaskReadV2 +from .task import TaskExport +from .task import TaskImport +from .task import TaskImportLegacy +from .task import TaskRead from .task import TaskType -class WorkflowTaskCreateV2(BaseModel): +class WorkflowTaskCreate(BaseModel): model_config = ConfigDict(extra="forbid") meta_non_parallel: DictStrAny | None = None @@ -26,14 +26,14 @@ class WorkflowTaskCreateV2(BaseModel): type_filters: TypeFilters = Field(default_factory=dict) -class WorkflowTaskReplaceV2(BaseModel): +class WorkflowTaskReplace(BaseModel): """Used by 'replace-task' endpoint""" args_non_parallel: dict[str, Any] | None = None args_parallel: dict[str, Any] | None = None -class WorkflowTaskReadV2(BaseModel): +class WorkflowTaskRead(BaseModel): id: int workflow_id: int @@ -48,14 +48,14 @@ class WorkflowTaskReadV2(BaseModel): task_type: TaskType task_id: int - task: TaskReadV2 + task: TaskRead -class WorkflowTaskReadV2WithWarning(WorkflowTaskReadV2): +class WorkflowTaskReadWithWarning(WorkflowTaskRead): warning: str | None = None -class WorkflowTaskUpdateV2(BaseModel): +class WorkflowTaskUpdate(BaseModel): model_config = ConfigDict(extra="forbid") meta_non_parallel: DictStrAny | None = None @@ -65,7 +65,7 @@ class WorkflowTaskUpdateV2(BaseModel): type_filters: TypeFilters = None -class WorkflowTaskImportV2(BaseModel): +class WorkflowTaskImport(BaseModel): model_config = ConfigDict(extra="forbid") meta_non_parallel: DictStrAny | None = None @@ -75,7 +75,7 @@ class WorkflowTaskImportV2(BaseModel): type_filters: TypeFilters | None = None input_filters: dict[str, Any] | None = None - task: TaskImportV2 | TaskImportV2Legacy + task: TaskImport | TaskImportLegacy @model_validator(mode="before") @classmethod @@ -106,11 +106,11 @@ def update_legacy_filters(cls, values: dict): return values -class WorkflowTaskExportV2(BaseModel): +class WorkflowTaskExport(BaseModel): meta_non_parallel: dict[str, Any] | None = None meta_parallel: dict[str, Any] | None = None args_non_parallel: dict[str, Any] | None = None args_parallel: dict[str, Any] | None = None type_filters: dict[str, bool] = Field(default_factory=dict) - task: TaskExportV2 + task: TaskExport diff --git a/fractal_server/app/shutdown.py b/fractal_server/app/shutdown.py index 0031e6ed3c..9caf488772 100644 --- a/fractal_server/app/shutdown.py +++ b/fractal_server/app/shutdown.py @@ -4,7 +4,7 @@ from fractal_server.app.db import get_async_db from fractal_server.app.models.v2 import JobV2 -from fractal_server.app.models.v2.job import JobStatusTypeV2 +from fractal_server.app.models.v2.job import JobStatusType from fractal_server.app.routes.aux._job import 
_write_shutdown_file from fractal_server.config import get_settings from fractal_server.logger import get_logger @@ -18,12 +18,12 @@ async def cleanup_after_shutdown(*, jobsV2: list[int], logger_name: str): stm_objects = ( select(JobV2) .where(JobV2.id.in_(jobsV2)) - .where(JobV2.status == JobStatusTypeV2.SUBMITTED) + .where(JobV2.status == JobStatusType.SUBMITTED) ) stm_ids = ( select(JobV2.id) .where(JobV2.id.in_(jobsV2)) - .where(JobV2.status == JobStatusTypeV2.SUBMITTED) + .where(JobV2.status == JobStatusType.SUBMITTED) ) async for session in get_async_db(): diff --git a/fractal_server/data_migrations/old/2_0_3.py b/fractal_server/data_migrations/old/2_0_3.py index a0e35de50a..00461402cf 100644 --- a/fractal_server/data_migrations/old/2_0_3.py +++ b/fractal_server/data_migrations/old/2_0_3.py @@ -9,7 +9,7 @@ import fractal_server from fractal_server.app.db import get_sync_db from fractal_server.app.models.v2 import JobV2 -from fractal_server.app.schemas.v2 import JobReadV2 +from fractal_server.app.schemas.v2 import JobRead def fix_db(): @@ -73,6 +73,6 @@ def fix_db(): logger.warning( - f"Now validating jobv2.id={job_v2.id} with JobReadV2." + f"Now validating jobv2.id={job_v2.id} with JobRead." ) - JobReadV2(**job_v2.model_dump()) + JobRead(**job_v2.model_dump()) logger.warning("END of execution of fix_db function") diff --git a/fractal_server/data_migrations/old/2_11_0.py b/fractal_server/data_migrations/old/2_11_0.py index 1df53e7b43..13dc930d35 100644 --- a/fractal_server/data_migrations/old/2_11_0.py +++ b/fractal_server/data_migrations/old/2_11_0.py @@ -9,11 +9,11 @@ from fractal_server.app.models import ProjectV2 from fractal_server.app.models import WorkflowTaskV2 from fractal_server.app.models import WorkflowV2 -from fractal_server.app.schemas.v2 import DatasetReadV2 -from fractal_server.app.schemas.v2 import JobReadV2 -from fractal_server.app.schemas.v2 import ProjectReadV2 -from fractal_server.app.schemas.v2 import TaskReadV2 -from fractal_server.app.schemas.v2 import WorkflowTaskReadV2 +from fractal_server.app.schemas.v2 import DatasetRead +from fractal_server.app.schemas.v2 import JobRead +from fractal_server.app.schemas.v2 import ProjectRead +from fractal_server.app.schemas.v2 import TaskRead +from fractal_server.app.schemas.v2 import WorkflowTaskRead from fractal_server.types import AttributeFilters logger = logging.getLogger("fix_db") @@ -79,9 +79,9 @@ def fix_db(): ]["input_filters"]["types"] ds.history[i]["workflowtask"].pop("input_filters") flag_modified(ds, "history") - DatasetReadV2( + DatasetRead( **ds.model_dump(), - project=ProjectReadV2(**ds.project.model_dump()), + project=ProjectRead(**ds.project.model_dump()), ) db.add(ds) logger.info(f"DatasetV2[{ds.id}] END - fixed filters") @@ -115,9 +115,9 @@ def fix_db(): ) wft.input_filters = None flag_modified(wft, "input_filters") - WorkflowTaskReadV2( + WorkflowTaskRead( **wft.model_dump(), - task=TaskReadV2(**wft.task.model_dump()), + task=TaskRead(**wft.task.model_dump()), ) db.add(wft) logger.info(f"WorkflowTaskV2[{wft.id}] END - fixed filters") @@ -157,7 +157,7 @@ def fix_db(): ) job.dataset_dump.pop("filters") flag_modified(job, "dataset_dump") - JobReadV2(**job.model_dump()) + JobRead(**job.model_dump()) db.add(job) logger.info(f"JobV2[{job.id}] END - fixed filters") diff --git a/fractal_server/json_schemas/generate_manifest_v2.py b/fractal_server/json_schemas/generate_manifest_v2.py index 2ec53859e0..1c45fc6dc3 100644 --- a/fractal_server/json_schemas/generate_manifest_v2.py +++ b/fractal_server/json_schemas/generate_manifest_v2.py @@ -3,7 +3,7 @@ import
fractal_server.app.schemas.v2 as v2 -new_schema = v2.manifest.ManifestV2.model_json_schema() +new_schema = v2.manifest.Manifest.model_json_schema() json_schema_path = ( Path(v2.__file__).parents[3] / "json_schemas/manifest_v2.json" ) diff --git a/fractal_server/runner/v2/runner.py b/fractal_server/runner/v2/runner.py index b7c05175a7..73f3bfdad6 100644 --- a/fractal_server/runner/v2/runner.py +++ b/fractal_server/runner/v2/runner.py @@ -17,8 +17,8 @@ from fractal_server.app.models.v2 import TaskGroupV2 from fractal_server.app.models.v2 import WorkflowTaskV2 from fractal_server.app.schemas.v2 import HistoryUnitStatus -from fractal_server.app.schemas.v2 import TaskDumpV2 -from fractal_server.app.schemas.v2 import TaskGroupDumpV2 +from fractal_server.app.schemas.v2 import TaskDump +from fractal_server.app.schemas.v2 import TaskGroupDump from fractal_server.app.schemas.v2 import TaskType from fractal_server.images import SingleImage from fractal_server.images.status_tools import IMAGE_STATUS_KEY @@ -165,10 +165,10 @@ def execute_tasks_v2( # Create dumps for workflowtask and taskgroup workflowtask_dump = dict( **wftask.model_dump(exclude={"task"}), - task=TaskDumpV2(**wftask.task.model_dump()).model_dump(), + task=TaskDump(**wftask.task.model_dump()).model_dump(), ) task_group = db.get(TaskGroupV2, wftask.task.taskgroupv2_id) - task_group_dump = TaskGroupDumpV2( + task_group_dump = TaskGroupDump( **task_group.model_dump() ).model_dump() # Create HistoryRun diff --git a/fractal_server/runner/v2/submit_workflow.py b/fractal_server/runner/v2/submit_workflow.py index 46a1368bbf..d3f9135b5a 100644 --- a/fractal_server/runner/v2/submit_workflow.py +++ b/fractal_server/runner/v2/submit_workflow.py @@ -20,7 +20,7 @@ from fractal_server.app.models.v2 import Profile from fractal_server.app.models.v2 import Resource from fractal_server.app.models.v2 import WorkflowV2 -from fractal_server.app.schemas.v2 import JobStatusTypeV2 +from fractal_server.app.schemas.v2 import JobStatusType from fractal_server.app.schemas.v2 import ResourceType from fractal_server.logger import get_logger from fractal_server.logger import reset_logger_handlers @@ -71,7 +71,7 @@ def fail_job( logger.error(log_msg) reset_logger_handlers(logger) job = db.get(JobV2, job.id) # refetch, in case it was updated - job.status = JobStatusTypeV2.FAILED + job.status = JobStatusType.FAILED job.end_timestamp = get_timestamp() job.log = log_msg db.merge(job) @@ -273,7 +273,7 @@ def submit_workflow( # Update job DB entry with next(DB.get_sync_db()) as db_sync: job = db_sync.get(JobV2, job_id) - job.status = JobStatusTypeV2.DONE + job.status = JobStatusType.DONE job.end_timestamp = get_timestamp() with log_file_path.open("r") as f: logs = f.read() diff --git a/fractal_server/tasks/v2/local/_utils.py b/fractal_server/tasks/v2/local/_utils.py index c5e08c3da4..67da0b8504 100644 --- a/fractal_server/tasks/v2/local/_utils.py +++ b/fractal_server/tasks/v2/local/_utils.py @@ -1,7 +1,7 @@ from pathlib import Path from fractal_server.app.models import Resource -from fractal_server.app.schemas.v2 import TaskCreateV2 +from fractal_server.app.schemas.v2 import TaskCreate from fractal_server.logger import get_logger from fractal_server.logger import set_logger from fractal_server.tasks.v2.utils_pixi import simplify_pyproject_toml @@ -51,7 +51,7 @@ def _customize_and_run_template( return stdout -def check_task_files_exist(task_list: list[TaskCreateV2]) -> None: +def check_task_files_exist(task_list: list[TaskCreate]) -> None: """ Check that the modules listed in 
task commands point to existing files. diff --git a/fractal_server/tasks/v2/local/collect.py b/fractal_server/tasks/v2/local/collect.py index f59c87e833..c649469e65 100644 --- a/fractal_server/tasks/v2/local/collect.py +++ b/fractal_server/tasks/v2/local/collect.py @@ -9,9 +9,9 @@ from fractal_server.app.models import Resource from fractal_server.app.models.v2 import TaskGroupV2 from fractal_server.app.schemas.v2 import FractalUploadedFile -from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2 -from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2 -from fractal_server.app.schemas.v2.manifest import ManifestV2 +from fractal_server.app.schemas.v2 import TaskGroupActivityAction +from fractal_server.app.schemas.v2 import TaskGroupActivityStatus +from fractal_server.app.schemas.v2.manifest import Manifest from fractal_server.logger import reset_logger_handlers from fractal_server.logger import set_logger from fractal_server.tasks.utils import get_log_path @@ -132,14 +132,13 @@ def collect_local( Path(task_group.path) / SCRIPTS_SUBFOLDER ).as_posix(), prefix=( - f"{int(time.time())}_" - f"{TaskGroupActivityActionV2.COLLECT}" + f"{int(time.time())}_{TaskGroupActivityAction.COLLECT}" ), logger_name=LOGGER_NAME, ) # Set status to ONGOING and refresh logs - activity.status = TaskGroupActivityStatusV2.ONGOING + activity.status = TaskGroupActivityStatus.ONGOING activity.log = get_current_log(log_file_path) activity = add_commit_refresh(obj=activity, db=db) @@ -216,7 +215,7 @@ def collect_local( pkg_manifest_dict = json.load(json_data) logger.info(f"loaded {manifest_path=}") logger.info("now validating manifest content") - pkg_manifest = ManifestV2(**pkg_manifest_dict) + pkg_manifest = Manifest(**pkg_manifest_dict) logger.info("validated manifest content") activity.log = get_current_log(log_file_path) activity = add_commit_refresh(obj=activity, db=db) @@ -257,7 +256,7 @@ def collect_local( # Finalize (write metadata to DB) logger.info("finalising - START") - activity.status = TaskGroupActivityStatusV2.OK + activity.status = TaskGroupActivityStatus.OK activity.timestamp_ended = get_timestamp() activity = add_commit_refresh(obj=activity, db=db) logger.info("finalising - END") diff --git a/fractal_server/tasks/v2/local/collect_pixi.py b/fractal_server/tasks/v2/local/collect_pixi.py index b6fbd4e9a4..fb1049879a 100644 --- a/fractal_server/tasks/v2/local/collect_pixi.py +++ b/fractal_server/tasks/v2/local/collect_pixi.py @@ -8,9 +8,9 @@ from fractal_server.app.models import Profile from fractal_server.app.models import Resource from fractal_server.app.schemas.v2 import FractalUploadedFile -from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2 -from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2 -from fractal_server.app.schemas.v2.manifest import ManifestV2 +from fractal_server.app.schemas.v2 import TaskGroupActivityAction +from fractal_server.app.schemas.v2 import TaskGroupActivityStatus +from fractal_server.app.schemas.v2.manifest import Manifest from fractal_server.logger import reset_logger_handlers from fractal_server.logger import set_logger from fractal_server.tasks.utils import get_log_path @@ -132,13 +132,12 @@ def collect_local_pixi( task_group.path, SCRIPTS_SUBFOLDER ).as_posix(), prefix=( - f"{int(time.time())}_" - f"{TaskGroupActivityActionV2.COLLECT}" + f"{int(time.time())}_{TaskGroupActivityAction.COLLECT}" ), logger_name=LOGGER_NAME, ) - activity.status = TaskGroupActivityStatusV2.ONGOING + activity.status = 
TaskGroupActivityStatus.ONGOING
         activity.log = get_current_log(log_file_path)
         activity = add_commit_refresh(obj=activity, db=db)
@@ -195,7 +194,7 @@ def collect_local_pixi(
             pkg_manifest_dict = json.load(json_data)
         logger.info(f"loaded {manifest_path=}")
         logger.info("now validating manifest content")
-        pkg_manifest = ManifestV2(**pkg_manifest_dict)
+        pkg_manifest = Manifest(**pkg_manifest_dict)
         logger.info("validated manifest content")
         activity.log = get_current_log(log_file_path)
         activity = add_commit_refresh(obj=activity, db=db)
@@ -241,7 +240,7 @@ def collect_local_pixi(
 
         # Finalize (write metadata to DB)
         logger.info("finalising - START")
-        activity.status = TaskGroupActivityStatusV2.OK
+        activity.status = TaskGroupActivityStatus.OK
         activity.timestamp_ended = get_timestamp()
         activity = add_commit_refresh(obj=activity, db=db)
         logger.info("finalising - END")
diff --git a/fractal_server/tasks/v2/local/deactivate.py b/fractal_server/tasks/v2/local/deactivate.py
index 9614730fd4..7f9d950c1e 100644
--- a/fractal_server/tasks/v2/local/deactivate.py
+++ b/fractal_server/tasks/v2/local/deactivate.py
@@ -6,9 +6,9 @@
 from fractal_server.app.db import get_sync_db
 from fractal_server.app.models import Profile
 from fractal_server.app.models import Resource
-from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
-from fractal_server.app.schemas.v2 import TaskGroupV2OriginEnum
-from fractal_server.app.schemas.v2.task_group import TaskGroupActivityStatusV2
+from fractal_server.app.schemas.v2 import TaskGroupActivityAction
+from fractal_server.app.schemas.v2 import TaskGroupOriginEnum
+from fractal_server.app.schemas.v2.task_group import TaskGroupActivityStatus
 from fractal_server.logger import reset_logger_handlers
 from fractal_server.logger import set_logger
 from fractal_server.tasks.utils import FORBIDDEN_DEPENDENCY_STRINGS
@@ -78,7 +78,7 @@ def deactivate_local(
             return
 
         try:
-            activity.status = TaskGroupActivityStatusV2.ONGOING
+            activity.status = TaskGroupActivityStatus.ONGOING
             activity = add_commit_refresh(obj=activity, db=db)
 
             if task_group.env_info is None:
@@ -102,7 +102,7 @@ def deactivate_local(
                     ).as_posix(),
                     prefix=(
                         f"{int(time.time())}_"
-                        f"{TaskGroupActivityActionV2.DEACTIVATE}"
+                        f"{TaskGroupActivityAction.DEACTIVATE}"
                     ),
                     logger_name=LOGGER_NAME,
                 )
@@ -120,7 +120,7 @@ def deactivate_local(
             logger.info("Add pip freeze stdout to TaskGroupV2 - end")
 
             # Handle some specific cases for wheel-file case
-            if task_group.origin == TaskGroupV2OriginEnum.WHEELFILE:
+            if task_group.origin == TaskGroupOriginEnum.WHEELFILE:
                 logger.info(
                     f"Handle specific cases for {task_group.origin=}."
                 )
@@ -209,7 +209,7 @@ def deactivate_local(
             logger.info(f"Now removing {task_group.venv_path}.")
             shutil.rmtree(task_group.venv_path)
             logger.info(f"All good, {task_group.venv_path} removed.")
-            activity.status = TaskGroupActivityStatusV2.OK
+            activity.status = TaskGroupActivityStatus.OK
             activity.log = get_current_log(log_file_path)
             activity.timestamp_ended = get_timestamp()
             activity = add_commit_refresh(obj=activity, db=db)
diff --git a/fractal_server/tasks/v2/local/deactivate_pixi.py b/fractal_server/tasks/v2/local/deactivate_pixi.py
index 3bbac19dc1..609b013d4b 100644
--- a/fractal_server/tasks/v2/local/deactivate_pixi.py
+++ b/fractal_server/tasks/v2/local/deactivate_pixi.py
@@ -5,7 +5,7 @@
 from fractal_server.app.db import get_sync_db
 from fractal_server.app.models import Profile
 from fractal_server.app.models import Resource
-from fractal_server.app.schemas.v2.task_group import TaskGroupActivityStatusV2
+from fractal_server.app.schemas.v2.task_group import TaskGroupActivityStatus
 from fractal_server.logger import reset_logger_handlers
 from fractal_server.logger import set_logger
 from fractal_server.tasks.utils import get_log_path
@@ -70,7 +70,7 @@ def deactivate_local_pixi(
             return
 
         try:
-            activity.status = TaskGroupActivityStatusV2.ONGOING
+            activity.status = TaskGroupActivityStatus.ONGOING
             activity = add_commit_refresh(obj=activity, db=db)
 
             # Actually mark the task group as non-active
@@ -82,7 +82,7 @@ def deactivate_local_pixi(
             logger.info(f"Now removing '{source_dir.as_posix()}'.")
             shutil.rmtree(source_dir)
             logger.info(f"All good, '{source_dir.as_posix()}' removed.")
-            activity.status = TaskGroupActivityStatusV2.OK
+            activity.status = TaskGroupActivityStatus.OK
             activity.log = get_current_log(log_file_path)
             activity.timestamp_ended = get_timestamp()
             activity = add_commit_refresh(obj=activity, db=db)
diff --git a/fractal_server/tasks/v2/local/delete.py b/fractal_server/tasks/v2/local/delete.py
index 9aade35c5f..23f8421182 100644
--- a/fractal_server/tasks/v2/local/delete.py
+++ b/fractal_server/tasks/v2/local/delete.py
@@ -5,8 +5,8 @@
 from fractal_server.app.db import get_sync_db
 from fractal_server.app.models import Profile
 from fractal_server.app.models import Resource
-from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
-from fractal_server.app.schemas.v2 import TaskGroupV2OriginEnum
+from fractal_server.app.schemas.v2 import TaskGroupActivityStatus
+from fractal_server.app.schemas.v2 import TaskGroupOriginEnum
 from fractal_server.logger import reset_logger_handlers
 from fractal_server.logger import set_logger
 from fractal_server.tasks.utils import get_log_path
@@ -45,7 +45,7 @@ def delete_local(
             return
 
         try:
-            activity.status = TaskGroupActivityStatusV2.ONGOING
+            activity.status = TaskGroupActivityStatus.ONGOING
             activity.log = get_current_log(log_file_path)
             activity = add_commit_refresh(obj=activity, db=db)
 
@@ -53,12 +53,12 @@ def delete_local(
             db.commit()
             logger.debug("Task group removed from database.")
 
-            if task_group.origin != TaskGroupV2OriginEnum.OTHER:
+            if task_group.origin != TaskGroupOriginEnum.OTHER:
                 logger.debug(f"Removing {task_group.path=}.")
                 shutil.rmtree(task_group.path)
                 logger.debug(f"{task_group.path=} removed.")
 
-            activity.status = TaskGroupActivityStatusV2.OK
+            activity.status = TaskGroupActivityStatus.OK
             activity.log = get_current_log(log_file_path)
             activity.timestamp_ended = get_timestamp()
             activity = add_commit_refresh(obj=activity, db=db)
diff --git a/fractal_server/tasks/v2/local/reactivate.py b/fractal_server/tasks/v2/local/reactivate.py
index 0e811c72d4..e6a4769f2f 100644
--- a/fractal_server/tasks/v2/local/reactivate.py
+++ b/fractal_server/tasks/v2/local/reactivate.py
@@ -6,8 +6,8 @@
 from fractal_server.app.db import get_sync_db
 from fractal_server.app.models import Profile
 from fractal_server.app.models import Resource
-from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
-from fractal_server.app.schemas.v2.task_group import TaskGroupActivityStatusV2
+from fractal_server.app.schemas.v2 import TaskGroupActivityAction
+from fractal_server.app.schemas.v2.task_group import TaskGroupActivityStatus
 from fractal_server.logger import reset_logger_handlers
 from fractal_server.logger import set_logger
 from fractal_server.tasks.utils import get_log_path
@@ -79,7 +79,7 @@ def reactivate_local(
             return
 
         try:
-            activity.status = TaskGroupActivityStatusV2.ONGOING
+            activity.status = TaskGroupActivityStatus.ONGOING
             activity = add_commit_refresh(obj=activity, db=db)
 
             # Prepare replacements for templates
@@ -105,7 +105,7 @@ def reactivate_local(
                     ).as_posix(),
                     prefix=(
                         f"{int(time.time())}_"
-                        f"{TaskGroupActivityActionV2.REACTIVATE}"
+                        f"{TaskGroupActivityAction.REACTIVATE}"
                    ),
                     logger_name=LOGGER_NAME,
                 )
@@ -126,7 +126,7 @@ def reactivate_local(
             )
             logger.debug("end - install from pip freeze")
             activity.log = get_current_log(log_file_path)
-            activity.status = TaskGroupActivityStatusV2.OK
+            activity.status = TaskGroupActivityStatus.OK
             activity.timestamp_ended = get_timestamp()
             activity = add_commit_refresh(obj=activity, db=db)
             task_group.active = True
diff --git a/fractal_server/tasks/v2/local/reactivate_pixi.py b/fractal_server/tasks/v2/local/reactivate_pixi.py
index 28d0904f3e..e31aeae1d4 100644
--- a/fractal_server/tasks/v2/local/reactivate_pixi.py
+++ b/fractal_server/tasks/v2/local/reactivate_pixi.py
@@ -6,8 +6,8 @@
 from fractal_server.app.db import get_sync_db
 from fractal_server.app.models import Profile
 from fractal_server.app.models import Resource
-from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
-from fractal_server.app.schemas.v2.task_group import TaskGroupActivityStatusV2
+from fractal_server.app.schemas.v2 import TaskGroupActivityAction
+from fractal_server.app.schemas.v2.task_group import TaskGroupActivityStatus
 from fractal_server.logger import reset_logger_handlers
 from fractal_server.logger import set_logger
 from fractal_server.tasks.utils import get_log_path
@@ -77,7 +77,7 @@ def reactivate_local_pixi(
             return
 
         try:
-            activity.status = TaskGroupActivityStatusV2.ONGOING
+            activity.status = TaskGroupActivityStatus.ONGOING
             activity = add_commit_refresh(obj=activity, db=db)
 
             common_args = dict(
@@ -126,7 +126,7 @@ def reactivate_local_pixi(
                     ).as_posix(),
                     prefix=(
                         f"{int(time.time())}_"
-                        f"{TaskGroupActivityActionV2.REACTIVATE}"
+                        f"{TaskGroupActivityAction.REACTIVATE}"
                     ),
                     logger_name=LOGGER_NAME,
                 )
@@ -176,7 +176,7 @@ def reactivate_local_pixi(
             )
 
             activity.log = get_current_log(log_file_path)
-            activity.status = TaskGroupActivityStatusV2.OK
+            activity.status = TaskGroupActivityStatus.OK
             activity.timestamp_ended = get_timestamp()
             activity = add_commit_refresh(obj=activity, db=db)
             task_group.active = True
diff --git a/fractal_server/tasks/v2/ssh/collect.py b/fractal_server/tasks/v2/ssh/collect.py
index 5d13bd6de6..53d5e5d247 100644
--- a/fractal_server/tasks/v2/ssh/collect.py
+++ b/fractal_server/tasks/v2/ssh/collect.py
@@ -6,9 +6,9 @@
 from fractal_server.app.models import Profile
 from fractal_server.app.models import Resource
 from fractal_server.app.schemas.v2 import FractalUploadedFile
-from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
-from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
-from fractal_server.app.schemas.v2.manifest import ManifestV2
+from fractal_server.app.schemas.v2 import TaskGroupActivityAction
+from fractal_server.app.schemas.v2 import TaskGroupActivityStatus
+from fractal_server.app.schemas.v2.manifest import Manifest
 from fractal_server.logger import reset_logger_handlers
 from fractal_server.logger import set_logger
 from fractal_server.ssh._fabric import SingleUseFractalSSH
@@ -166,7 +166,7 @@ def collect_ssh(
             script_dir_remote=script_dir_remote,
             prefix=(
                 f"{int(time.time())}_"
-                f"{TaskGroupActivityActionV2.COLLECT}"
+                f"{TaskGroupActivityAction.COLLECT}"
             ),
             fractal_ssh=fractal_ssh,
             logger_name=LOGGER_NAME,
@@ -175,7 +175,7 @@
         logger.info("installing - START")
 
         # Set status to ONGOING and refresh logs
-        activity.status = TaskGroupActivityStatusV2.ONGOING
+        activity.status = TaskGroupActivityStatus.ONGOING
         activity.log = get_current_log(log_file_path)
         activity = add_commit_refresh(obj=activity, db=db)
@@ -254,7 +254,7 @@ def collect_ssh(
                     manifest_path_remote
                 )
                 logger.info(f"Loaded {manifest_path_remote=}")
-                pkg_manifest = ManifestV2(**pkg_manifest_dict)
-                logger.info("Manifest is a valid ManifestV2")
+                pkg_manifest = Manifest(**pkg_manifest_dict)
+                logger.info("Manifest is valid")
 
                 logger.info("_prepare_tasks_metadata - start")
@@ -290,7 +290,7 @@ def collect_ssh(
 
             # Finalize (write metadata to DB)
             logger.info("finalising - START")
-            activity.status = TaskGroupActivityStatusV2.OK
+            activity.status = TaskGroupActivityStatus.OK
             activity.timestamp_ended = get_timestamp()
             activity = add_commit_refresh(obj=activity, db=db)
             logger.info("finalising - END")
diff --git a/fractal_server/tasks/v2/ssh/collect_pixi.py b/fractal_server/tasks/v2/ssh/collect_pixi.py
index c96789a896..53b538a5bf 100644
--- a/fractal_server/tasks/v2/ssh/collect_pixi.py
+++ b/fractal_server/tasks/v2/ssh/collect_pixi.py
@@ -6,9 +6,9 @@
 from fractal_server.app.models import Profile
 from fractal_server.app.models import Resource
 from fractal_server.app.schemas.v2 import FractalUploadedFile
-from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
-from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
-from fractal_server.app.schemas.v2.manifest import ManifestV2
+from fractal_server.app.schemas.v2 import TaskGroupActivityAction
+from fractal_server.app.schemas.v2 import TaskGroupActivityStatus
+from fractal_server.app.schemas.v2.manifest import Manifest
 from fractal_server.logger import reset_logger_handlers
 from fractal_server.logger import set_logger
 from fractal_server.ssh._fabric import SingleUseFractalSSH
@@ -184,7 +184,7 @@ def collect_ssh_pixi(
         logger.info("installing - START")
 
         # Set status to ONGOING and refresh logs
-        activity.status = TaskGroupActivityStatusV2.ONGOING
+        activity.status = TaskGroupActivityStatus.ONGOING
         activity.log = get_current_log(log_file_path)
         activity = add_commit_refresh(obj=activity, db=db)
@@ -195,7 +195,7 @@
             script_dir_remote=script_dir_remote,
             prefix=(
                 f"{int(time.time())}_"
-                f"{TaskGroupActivityActionV2.COLLECT}"
+                f"{TaskGroupActivityAction.COLLECT}"
             ),
             logger_name=LOGGER_NAME,
             fractal_ssh=fractal_ssh,
@@ -280,7 +280,7 @@ def collect_ssh_pixi(
                     manifest_path_remote
                 )
                 logger.info(f"Loaded {manifest_path_remote=}")
-                pkg_manifest = ManifestV2(**pkg_manifest_dict)
-                logger.info("Manifest is a valid ManifestV2")
+                pkg_manifest = Manifest(**pkg_manifest_dict)
+                logger.info("Manifest is valid")
 
                 logger.info("_prepare_tasks_metadata - start")
@@ -327,7 +327,7 @@ def collect_ssh_pixi(
 
         # Finalize (write metadata to DB)
         logger.info("finalising - START")
-        activity.status = TaskGroupActivityStatusV2.OK
+        activity.status = TaskGroupActivityStatus.OK
         activity.timestamp_ended = get_timestamp()
         activity = add_commit_refresh(obj=activity, db=db)
         logger.info("finalising - END")
diff --git a/fractal_server/tasks/v2/ssh/deactivate.py b/fractal_server/tasks/v2/ssh/deactivate.py
index 4c824bdbe3..340a23eb88 100644
--- a/fractal_server/tasks/v2/ssh/deactivate.py
+++ b/fractal_server/tasks/v2/ssh/deactivate.py
@@ -5,9 +5,9 @@
 from fractal_server.app.db import get_sync_db
 from fractal_server.app.models import Profile
 from fractal_server.app.models import Resource
-from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
-from fractal_server.app.schemas.v2 import TaskGroupV2OriginEnum
-from fractal_server.app.schemas.v2.task_group import TaskGroupActivityStatusV2
+from fractal_server.app.schemas.v2 import TaskGroupActivityAction
+from fractal_server.app.schemas.v2 import TaskGroupOriginEnum
+from fractal_server.app.schemas.v2.task_group import TaskGroupActivityStatus
 from fractal_server.logger import reset_logger_handlers
 from fractal_server.logger import set_logger
 from fractal_server.ssh._fabric import SingleUseFractalSSH
@@ -101,7 +101,7 @@ def deactivate_ssh(
             )
             return
 
-        activity.status = TaskGroupActivityStatusV2.ONGOING
+        activity.status = TaskGroupActivityStatus.ONGOING
         activity = add_commit_refresh(obj=activity, db=db)
 
         if task_group.env_info is None:
@@ -135,7 +135,7 @@ def deactivate_ssh(
             script_dir_remote=script_dir_remote,
             prefix=(
                 f"{int(time.time())}_"
-                f"{TaskGroupActivityActionV2.DEACTIVATE}"
+                f"{TaskGroupActivityAction.DEACTIVATE}"
             ),
             fractal_ssh=fractal_ssh,
             logger_name=LOGGER_NAME,
@@ -160,7 +160,7 @@ def deactivate_ssh(
             )
 
             # Handle some specific cases for wheel-file case
-            if task_group.origin == TaskGroupV2OriginEnum.WHEELFILE:
+            if task_group.origin == TaskGroupOriginEnum.WHEELFILE:
                 logger.info(
                     f"Handle specific cases for {task_group.origin=}."
                 )
@@ -264,7 +264,7 @@ def deactivate_ssh(
                 safe_root=profile.tasks_remote_dir,
             )
             logger.info(f"All good, {task_group.venv_path} removed.")
-            activity.status = TaskGroupActivityStatusV2.OK
+            activity.status = TaskGroupActivityStatus.OK
             activity.log = get_current_log(log_file_path)
             activity.timestamp_ended = get_timestamp()
             activity = add_commit_refresh(obj=activity, db=db)
diff --git a/fractal_server/tasks/v2/ssh/deactivate_pixi.py b/fractal_server/tasks/v2/ssh/deactivate_pixi.py
index f666d90b3e..315e9b4276 100644
--- a/fractal_server/tasks/v2/ssh/deactivate_pixi.py
+++ b/fractal_server/tasks/v2/ssh/deactivate_pixi.py
@@ -4,7 +4,7 @@
 from fractal_server.app.db import get_sync_db
 from fractal_server.app.models import Profile
 from fractal_server.app.models import Resource
-from fractal_server.app.schemas.v2.task_group import TaskGroupActivityStatusV2
+from fractal_server.app.schemas.v2.task_group import TaskGroupActivityStatus
 from fractal_server.logger import reset_logger_handlers
 from fractal_server.logger import set_logger
 from fractal_server.ssh._fabric import SingleUseFractalSSH
@@ -109,7 +109,7 @@ def deactivate_ssh_pixi(
                 safe_root=profile.tasks_remote_dir,
             )
             logger.info(f"All good, {source_dir} removed.")
-            activity.status = TaskGroupActivityStatusV2.OK
+            activity.status = TaskGroupActivityStatus.OK
             activity.log = get_current_log(log_file_path)
             activity.timestamp_ended = get_timestamp()
             activity = add_commit_refresh(obj=activity, db=db)
diff --git a/fractal_server/tasks/v2/ssh/delete.py b/fractal_server/tasks/v2/ssh/delete.py
index 42206e7aa1..94ec41d4fa 100644
--- a/fractal_server/tasks/v2/ssh/delete.py
+++ b/fractal_server/tasks/v2/ssh/delete.py
@@ -4,8 +4,8 @@
 from fractal_server.app.db import get_sync_db
 from fractal_server.app.models import Profile
 from fractal_server.app.models import Resource
-from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
-from fractal_server.app.schemas.v2 import TaskGroupV2OriginEnum
+from fractal_server.app.schemas.v2 import TaskGroupActivityStatus
+from fractal_server.app.schemas.v2 import TaskGroupOriginEnum
 from fractal_server.logger import reset_logger_handlers
 from fractal_server.logger import set_logger
 from fractal_server.ssh._fabric import SingleUseFractalSSH
@@ -80,7 +80,7 @@ def delete_ssh(
         if not ssh_ok:
             return
 
-        activity.status = TaskGroupActivityStatusV2.ONGOING
+        activity.status = TaskGroupActivityStatus.ONGOING
         activity.log = get_current_log(log_file_path)
         activity = add_commit_refresh(obj=activity, db=db)
 
@@ -88,7 +88,7 @@ def delete_ssh(
         db.commit()
         logger.debug("Task group removed from database.")
 
-        if task_group.origin != TaskGroupV2OriginEnum.OTHER:
+        if task_group.origin != TaskGroupOriginEnum.OTHER:
             logger.debug(
                 f"Removing remote {task_group.path=} "
                 f"(with {profile.tasks_remote_dir=})."
@@ -99,7 +99,7 @@ def delete_ssh(
            )
             logger.debug(f"Remote {task_group.path=} removed.")
 
-        activity.status = TaskGroupActivityStatusV2.OK
+        activity.status = TaskGroupActivityStatus.OK
         activity.log = get_current_log(log_file_path)
         activity.timestamp_ended = get_timestamp()
         activity = add_commit_refresh(obj=activity, db=db)
diff --git a/fractal_server/tasks/v2/ssh/reactivate.py b/fractal_server/tasks/v2/ssh/reactivate.py
index b518f89bb9..1a4cc76968 100644
--- a/fractal_server/tasks/v2/ssh/reactivate.py
+++ b/fractal_server/tasks/v2/ssh/reactivate.py
@@ -5,8 +5,8 @@
 from fractal_server.app.db import get_sync_db
 from fractal_server.app.models import Profile
 from fractal_server.app.models import Resource
-from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
-from fractal_server.app.schemas.v2.task_group import TaskGroupActivityStatusV2
+from fractal_server.app.schemas.v2 import TaskGroupActivityAction
+from fractal_server.app.schemas.v2.task_group import TaskGroupActivityStatus
 from fractal_server.logger import reset_logger_handlers
 from fractal_server.logger import set_logger
 from fractal_server.ssh._fabric import SingleUseFractalSSH
@@ -103,7 +103,7 @@ def reactivate_ssh(
             )
             return
 
-        activity.status = TaskGroupActivityStatusV2.ONGOING
+        activity.status = TaskGroupActivityStatus.ONGOING
         activity = add_commit_refresh(obj=activity, db=db)
 
         # Prepare replacements for templates
@@ -147,7 +147,7 @@ def reactivate_ssh(
             script_dir_remote=script_dir_remote,
             prefix=(
                 f"{int(time.time())}_"
-                f"{TaskGroupActivityActionV2.REACTIVATE}"
+                f"{TaskGroupActivityAction.REACTIVATE}"
             ),
             fractal_ssh=fractal_ssh,
             logger_name=LOGGER_NAME,
@@ -172,7 +172,7 @@ def reactivate_ssh(
             )
             logger.info("end - install from pip freeze")
             activity.log = get_current_log(log_file_path)
-            activity.status = TaskGroupActivityStatusV2.OK
+            activity.status = TaskGroupActivityStatus.OK
             activity.timestamp_ended = get_timestamp()
             activity = add_commit_refresh(obj=activity, db=db)
             task_group.active = True
diff --git a/fractal_server/tasks/v2/ssh/reactivate_pixi.py b/fractal_server/tasks/v2/ssh/reactivate_pixi.py
index f8f11662a4..b80d416cdb 100644
--- a/fractal_server/tasks/v2/ssh/reactivate_pixi.py
+++ b/fractal_server/tasks/v2/ssh/reactivate_pixi.py
@@ -5,8 +5,8 @@
 from fractal_server.app.db import get_sync_db
 from fractal_server.app.models import Profile
 from fractal_server.app.models import Resource
-from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
-from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
+from fractal_server.app.schemas.v2 import TaskGroupActivityAction
+from fractal_server.app.schemas.v2 import TaskGroupActivityStatus
 from fractal_server.logger import reset_logger_handlers
 from fractal_server.logger import set_logger
 from fractal_server.ssh._fabric import SingleUseFractalSSH
@@ -150,7 +150,7 @@ def reactivate_ssh_pixi(
         logger.info("installing - START")
 
         # Set status to ONGOING and refresh logs
-        activity.status = TaskGroupActivityStatusV2.ONGOING
+        activity.status = TaskGroupActivityStatus.ONGOING
         activity.log = get_current_log(log_file_path)
         activity = add_commit_refresh(obj=activity, db=db)
@@ -164,7 +164,7 @@ def reactivate_ssh_pixi(
             script_dir_remote=script_dir_remote,
             prefix=(
                 f"{int(time.time())}_"
-                f"{TaskGroupActivityActionV2.REACTIVATE}"
+                f"{TaskGroupActivityAction.REACTIVATE}"
             ),
             logger_name=LOGGER_NAME,
             fractal_ssh=fractal_ssh,
@@ -247,7 +247,7 @@ def reactivate_ssh_pixi(
         activity = add_commit_refresh(obj=activity, db=db)
 
         # Finalize (write metadata to DB)
-        activity.status = TaskGroupActivityStatusV2.OK
+        activity.status = TaskGroupActivityStatus.OK
         activity.timestamp_ended = get_timestamp()
         activity = add_commit_refresh(obj=activity, db=db)
         task_group.active = True
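Every lifecycle module above (collect, deactivate, delete, reactivate, local and SSH alike) drives the same state machine on the renamed enum: the activity is flipped from PENDING to ONGOING before any real work, and ends as OK or FAILED. A minimal sketch of that contract, assuming only the enum members visible in this patch and a hypothetical `do_work` callable:

    from fractal_server.app.schemas.v2 import TaskGroupActivityStatus

    def run_activity(activity, db, do_work) -> None:
        # Mark the activity as started, and persist it, before doing work.
        activity.status = TaskGroupActivityStatus.ONGOING
        db.add(activity)
        db.commit()
        try:
            do_work()
            activity.status = TaskGroupActivityStatus.OK
        except Exception:
            # Counterpart of fail_and_cleanup below: record the failure.
            activity.status = TaskGroupActivityStatus.FAILED
            raise
        finally:
            db.add(activity)
            db.commit()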
diff --git a/fractal_server/tasks/v2/utils_background.py b/fractal_server/tasks/v2/utils_background.py
index 50e8d1c153..500e28972e 100644
--- a/fractal_server/tasks/v2/utils_background.py
+++ b/fractal_server/tasks/v2/utils_background.py
@@ -6,10 +6,10 @@
 from fractal_server.app.models.v2 import TaskGroupActivityV2
 from fractal_server.app.models.v2 import TaskGroupV2
-from fractal_server.app.schemas.v2 import TaskCreateV2
-from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
-from fractal_server.app.schemas.v2.manifest import ManifestV2
-from fractal_server.app.schemas.v2.task_group import TaskGroupActivityActionV2
+from fractal_server.app.schemas.v2 import TaskCreate
+from fractal_server.app.schemas.v2 import TaskGroupActivityStatus
+from fractal_server.app.schemas.v2.manifest import Manifest
+from fractal_server.app.schemas.v2.task_group import TaskGroupActivityAction
 from fractal_server.exceptions import UnreachableBranchError
 from fractal_server.logger import get_logger
 from fractal_server.logger import reset_logger_handlers
@@ -66,11 +66,11 @@ def fail_and_cleanup(
         f"Original error: {str(exception)}"
     )
 
-    task_group_activity.status = TaskGroupActivityStatusV2.FAILED
+    task_group_activity.status = TaskGroupActivityStatus.FAILED
     task_group_activity.timestamp_ended = get_timestamp()
     task_group_activity.log = get_current_log(log_file_path)
     task_group_activity = add_commit_refresh(obj=task_group_activity, db=db)
-    if task_group_activity.action == TaskGroupActivityActionV2.COLLECT:
+    if task_group_activity.action == TaskGroupActivityAction.COLLECT:
         db.delete(task_group)
         db.commit()
     reset_logger_handlers(logger)
@@ -78,12 +78,12 @@
 
 def prepare_tasks_metadata(
     *,
-    package_manifest: ManifestV2,
+    package_manifest: Manifest,
     package_root: Path,
     python_bin: Path | None = None,
     project_python_wrapper: Path | None = None,
     package_version: str | None = None,
-) -> list[TaskCreateV2]:
+) -> list[TaskCreate]:
     """
     Based on the package manifest and additional info, prepare the task
     list.
@@ -129,7 +129,7 @@ def prepare_tasks_metadata(
             )
             task_attributes["command_parallel"] = cmd_parallel
         # Create object
-        task_obj = TaskCreateV2(
+        task_obj = TaskCreate(
             **_task.model_dump(
                 exclude={
                     "executable_non_parallel",
diff --git a/fractal_server/tasks/v2/utils_database.py b/fractal_server/tasks/v2/utils_database.py
index e3c0eadf52..1a2e7da640 100644
--- a/fractal_server/tasks/v2/utils_database.py
+++ b/fractal_server/tasks/v2/utils_database.py
@@ -3,13 +3,13 @@
 from fractal_server.app.models.v2 import TaskGroupV2
 from fractal_server.app.models.v2 import TaskV2
-from fractal_server.app.schemas.v2 import TaskCreateV2
+from fractal_server.app.schemas.v2 import TaskCreate
 
 
 def create_db_tasks_and_update_task_group_sync(
     *,
     task_group_id: int,
-    task_list: list[TaskCreateV2],
+    task_list: list[TaskCreate],
     db: DBSyncSession,
 ) -> TaskGroupV2:
     """
@@ -36,7 +36,7 @@
 async def create_db_tasks_and_update_task_group_async(
     *,
     task_group_id: int,
-    task_list: list[TaskCreateV2],
+    task_list: list[TaskCreate],
     db: AsyncSession,
 ) -> TaskGroupV2:
     """
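Since the old `*V2` schema names stop being exported by `fractal_server.app.schemas.v2`, any out-of-tree script that still imports them will fail at import time. A throwaway compatibility shim (hypothetical, not part of this patch) can bridge the transition by re-exporting the new classes under the old names:

    # compat_schemas_v2.py -- hypothetical migration shim, not in this patch.
    # Extend the list as needed; delete the module once callers are updated.
    from fractal_server.app.schemas.v2 import Manifest as ManifestV2
    from fractal_server.app.schemas.v2 import TaskCreate as TaskCreateV2
    from fractal_server.app.schemas.v2 import (
        TaskGroupActivityStatus as TaskGroupActivityStatusV2,
    )
    from fractal_server.app.schemas.v2 import (
        TaskGroupOriginEnum as TaskGroupV2OriginEnum,
    )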
diff --git a/scripts/atomicity-test/tmp.py b/scripts/atomicity-test/tmp.py
index d25e028cd6..33bac7f74a 100644
--- a/scripts/atomicity-test/tmp.py
+++ b/scripts/atomicity-test/tmp.py
@@ -8,7 +8,7 @@
 from fractal_server.app.models.v2 import JobV2
 from fractal_server.app.models.v2 import ProjectV2
 from fractal_server.app.models.v2 import WorkflowV2
-from fractal_server.app.schemas.v2 import JobStatusTypeV2
+from fractal_server.app.schemas.v2 import JobStatusType
 
 
 async def prepare_data():
@@ -40,7 +40,7 @@ async def prepare_data():
         user_email="a@example.org",
         first_task_index=0,
         last_task_index=0,
-        status=JobStatusTypeV2.SUBMITTED,
+        status=JobStatusType.SUBMITTED,
     )
     db.add(job)
     await db.commit()
@@ -61,7 +61,7 @@ def function1(dataset_id, job_id):
     time.sleep(4)
 
     job = db_sync.get(JobV2, job_id)
-    job.status = JobStatusTypeV2.DONE
+    job.status = JobStatusType.DONE
     job.log = "asdasdadasda"
     db_sync.merge(job)
     print(time.perf_counter(), "function1", "merged job")
diff --git a/scripts/client/client.py b/scripts/client/client.py
index 4e477d5c5b..a864e0d998 100644
--- a/scripts/client/client.py
+++ b/scripts/client/client.py
@@ -12,19 +12,19 @@
 from fractal_server.app.schemas.user import UserCreate
 from fractal_server.app.schemas.user import UserRead
 from fractal_server.app.schemas.user import UserUpdate
-from fractal_server.app.schemas.v2 import DatasetCreateV2
-from fractal_server.app.schemas.v2 import DatasetImportV2
-from fractal_server.app.schemas.v2 import DatasetReadV2
-from fractal_server.app.schemas.v2 import JobCreateV2
-from fractal_server.app.schemas.v2 import JobReadV2
-from fractal_server.app.schemas.v2 import ProjectCreateV2
-from fractal_server.app.schemas.v2 import ProjectReadV2
-from fractal_server.app.schemas.v2 import TaskCreateV2
-from fractal_server.app.schemas.v2 import TaskReadV2
-from fractal_server.app.schemas.v2 import WorkflowCreateV2
-from fractal_server.app.schemas.v2 import WorkflowReadV2
-from fractal_server.app.schemas.v2 import WorkflowTaskCreateV2
-from fractal_server.app.schemas.v2 import WorkflowTaskReadV2
+from fractal_server.app.schemas.v2 import DatasetCreate
+from fractal_server.app.schemas.v2 import DatasetImport
+from fractal_server.app.schemas.v2 import DatasetRead
+from fractal_server.app.schemas.v2 import JobCreate
+from fractal_server.app.schemas.v2 import JobRead
+from fractal_server.app.schemas.v2 import ProjectCreate
+from fractal_server.app.schemas.v2 import ProjectRead
+from fractal_server.app.schemas.v2 import TaskCreate
+from fractal_server.app.schemas.v2 import TaskRead
+from fractal_server.app.schemas.v2 import WorkflowCreate
+from fractal_server.app.schemas.v2 import WorkflowRead
+from fractal_server.app.schemas.v2 import WorkflowTaskCreate
+from fractal_server.app.schemas.v2 import WorkflowTaskRead
 from fractal_server.main import app
 
 DEFAULT_CREDENTIALS = {}
@@ -155,34 +155,34 @@ def associate_user_with_profile(self, user_id: int):
         )
         self.detail(res)
 
-    def add_project(self, project: ProjectCreateV2):
+    def add_project(self, project: ProjectCreate):
         res = self.make_request(
             endpoint="api/v2/project/",
             method="POST",
             data=project.model_dump(),
         )
         self.detail(res)
-        return ProjectReadV2(**response_json(res))
+        return ProjectRead(**response_json(res))
 
-    def add_dataset(self, project_id, dataset: DatasetCreateV2):
+    def add_dataset(self, project_id, dataset: DatasetCreate):
         res = self.make_request(
             endpoint=f"api/v2/project/{project_id}/dataset/",
             method="POST",
             data=dataset.model_dump(),
         )
         self.detail(res)
-        return DatasetReadV2(**response_json(res))
+        return DatasetRead(**response_json(res))
 
-    def import_dataset(self, project_id, dataset: DatasetImportV2):
+    def import_dataset(self, project_id, dataset: DatasetImport):
         res = self.make_request(
             endpoint=f"api/v2/project/{project_id}/dataset/import/",
             method="POST",
             data=dataset.model_dump(),
         )
         self.detail(res)
-        return DatasetReadV2(**response_json(res))
+        return DatasetRead(**response_json(res))
 
-    def add_workflow(self, project_id, workflow: WorkflowCreateV2):
+    def add_workflow(self, project_id, workflow: WorkflowCreate):
         res = self.make_request(
             endpoint=f"api/v2/project/{project_id}/workflow/",
             method="POST",
@@ -190,14 +190,14 @@ def add_workflow(self, project_id, workflow):
         )
         self.detail(res)
 
-        return WorkflowReadV2(**response_json(res))
+        return WorkflowRead(**response_json(res))
 
     def add_workflowtask(
         self,
         project_id: int,
         workflow_id: int,
         task_id: int,
-        wftask: WorkflowTaskCreateV2,
+        wftask: WorkflowTaskCreate,
     ):
         res = self.make_request(
             endpoint=f"api/v2/project/{project_id}/workflow/"
@@ -207,10 +207,10 @@ def add_workflowtask(
         )
         self.detail(res)
 
-        return WorkflowTaskReadV2(**response_json(res))
+        return WorkflowTaskRead(**response_json(res))
 
     def add_working_task(self):
-        task = TaskCreateV2(
+        task = TaskCreate(
             name="Echo Task",
             command_non_parallel="echo",
             command_parallel="echo",
@@ -221,10 +221,10 @@ def add_working_task(self):
             data=task.model_dump(exclude_none=True),
         )
         self.detail(res)
-        return TaskReadV2(**response_json(res))
+        return TaskRead(**response_json(res))
 
     def add_failing_task(self):
-        task = TaskCreateV2(
+        task = TaskCreate(
             name="Ls Task",
             command_non_parallel="ls",
         )
@@ -234,16 +234,16 @@ def add_failing_task(self):
             data=task.model_dump(exclude_none=True),
         )
         self.detail(res)
-        return TaskReadV2(**response_json(res))
+        return TaskRead(**response_json(res))
 
-    def add_task(self, task: TaskCreateV2):
+    def add_task(self, task: TaskCreate):
         res = self.make_request(
             endpoint="api/v2/task/",
             method="POST",
             data=task.model_dump(exclude_none=True),
         )
         self.detail(res)
-        return TaskReadV2(**response_json(res))
+        return TaskRead(**response_json(res))
 
     def whoami(self):
         res = self.make_request(
@@ -268,7 +268,7 @@ def submit_job(
         project_id: int,
         workflow_id: int,
         dataset_id: int,
-        applyworkflow: JobCreateV2,
+        applyworkflow: JobCreate,
     ):
         res = self.make_request(
             endpoint=(
@@ -280,7 +280,7 @@ def submit_job(
         )
         self.detail(res)
 
-        return JobReadV2(**response_json(res))
+        return JobRead(**response_json(res))
 
     def wait_for_all_jobs(
         self,
diff --git a/scripts/db_performance/create_dbs.py b/scripts/db_performance/create_dbs.py
index 16c58894d5..473d96791e 100644
--- a/scripts/db_performance/create_dbs.py
+++ b/scripts/db_performance/create_dbs.py
@@ -14,11 +14,11 @@
 from fractal_server.app.models import HistoryRun
 from fractal_server.app.models import HistoryUnit
 from fractal_server.app.models import JobV2
-from fractal_server.app.schemas.v2 import DatasetImportV2
-from fractal_server.app.schemas.v2 import JobReadV2
-from fractal_server.app.schemas.v2 import ProjectCreateV2
-from fractal_server.app.schemas.v2 import WorkflowCreateV2
-from fractal_server.app.schemas.v2 import WorkflowTaskCreateV2
+from fractal_server.app.schemas.v2 import DatasetImport
+from fractal_server.app.schemas.v2 import JobRead
+from fractal_server.app.schemas.v2 import ProjectCreate
+from fractal_server.app.schemas.v2 import WorkflowCreate
+from fractal_server.app.schemas.v2 import WorkflowTaskCreate
 from fractal_server.app.schemas.v2.history import HistoryUnitStatus
 from scripts.client import FractalClient
 
@@ -27,7 +27,7 @@ def insert_job(
     project_id: int, workflow_id: int, dataset_id: int, db: Session
-) -> JobReadV2:
+) -> JobRead:
     job = JobV2(
         project_id=project_id,
         workflow_id=workflow_id,
@@ -165,21 +165,21 @@ def bulk_insert_history_image_cache(
     admin = FractalClient()
     user = _create_user_client(admin, user_identifier="user1")
-    proj = user.add_project(ProjectCreateV2(name="MyProject"))
+    proj = user.add_project(ProjectCreate(name="MyProject"))
     working_task = admin.add_working_task()
 
     for cluster in range(num_clusters):
         ds = user.import_dataset(
             proj.id,
-            DatasetImportV2(
+            DatasetImport(
                 name=f"MyDataset_{cluster}",
                 zarr_dir="/invalid/zarr",
             ),
         )
         wf = user.add_workflow(
-            proj.id, WorkflowCreateV2(name=f"MyWorkflow_{cluster}")
+            proj.id, WorkflowCreate(name=f"MyWorkflow_{cluster}")
         )
         wftask = user.add_workflowtask(
-            proj.id, wf.id, working_task.id, WorkflowTaskCreateV2()
+            proj.id, wf.id, working_task.id, WorkflowTaskCreate()
         )
 
         with next(get_sync_db()) as db:
diff --git a/scripts/merge-dbs/create_mock_db.py b/scripts/merge-dbs/create_mock_db.py
index 7693c78f53..084bd3ee5c 100644
--- a/scripts/merge-dbs/create_mock_db.py
+++ b/scripts/merge-dbs/create_mock_db.py
@@ -1,9 +1,9 @@
 from fractal_server.app.schemas.user import UserCreate
-from fractal_server.app.schemas.v2 import DatasetImportV2
-from fractal_server.app.schemas.v2 import JobCreateV2
-from fractal_server.app.schemas.v2 import ProjectCreateV2
-from fractal_server.app.schemas.v2 import WorkflowCreateV2
-from fractal_server.app.schemas.v2 import WorkflowTaskCreateV2
+from fractal_server.app.schemas.v2 import DatasetImport
+from fractal_server.app.schemas.v2 import JobCreate
+from fractal_server.app.schemas.v2 import ProjectCreate
+from fractal_server.app.schemas.v2 import WorkflowCreate
+from fractal_server.app.schemas.v2 import WorkflowTaskCreate
 from scripts.client import FractalClient
 
@@ -60,19 +60,17 @@ def _user_flow_vanilla(
     working_task_id: int,
 ):
     user = _create_user_client(admin, user_identifier="vanilla")
-    proj = user.add_project(ProjectCreateV2(name="MyProject_uv"))
+    proj = user.add_project(ProjectCreate(name="MyProject_uv"))
     image_list = create_image_list(n_images=10)
     ds = user.import_dataset(
         proj.id,
-        DatasetImportV2(
+        DatasetImport(
             name="MyDataset", zarr_dir="/invalid/zarr", images=image_list
         ),
     )
-    wf = user.add_workflow(proj.id, WorkflowCreateV2(name="MyWorkflow"))
-    user.add_workflowtask(
-        proj.id, wf.id, working_task_id, WorkflowTaskCreateV2()
-    )
-    user.submit_job(proj.id, wf.id, ds.id, applyworkflow=JobCreateV2())
+    wf = user.add_workflow(proj.id, WorkflowCreate(name="MyWorkflow"))
+    user.add_workflowtask(proj.id, wf.id, working_task_id, WorkflowTaskCreate())
+    user.submit_job(proj.id, wf.id, ds.id, applyworkflow=JobCreate())
 
     return user.whoami()
 
@@ -88,29 +86,29 @@ def _user_flow_power(
     failing_task_id: int,
 ):
     user = _create_user_client(admin, user_identifier="power")
-    proj = user.add_project(ProjectCreateV2(name="MyProject_upw"))
+    proj = user.add_project(ProjectCreate(name="MyProject_upw"))
-    # we add also a dataset with images
+    # we also add a dataset with images
     image_list = create_image_list(n_images=100)
     num_workflows = 20
     num_jobs_per_workflow = 20
     for ind_wf in range(num_workflows):
         wf = user.add_workflow(
-            proj.id, WorkflowCreateV2(name=f"MyWorkflow-{ind_wf}")
+            proj.id, WorkflowCreate(name=f"MyWorkflow-{ind_wf}")
         )
         user.add_workflowtask(
-            proj.id, wf.id, working_task_id, WorkflowTaskCreateV2()
+            proj.id, wf.id, working_task_id, WorkflowTaskCreate()
         )
         if ind_wf % 2 == 0:
             user.add_workflowtask(
-                proj.id, wf.id, working_task_id, WorkflowTaskCreateV2()
+                proj.id, wf.id, working_task_id, WorkflowTaskCreate()
             )
             user.add_workflowtask(
-                proj.id, wf.id, failing_task_id, WorkflowTaskCreateV2()
+                proj.id, wf.id, failing_task_id, WorkflowTaskCreate()
             )
         for ind_job in range(num_jobs_per_workflow):
             ds = user.import_dataset(
                 proj.id,
-                DatasetImportV2(
+                DatasetImport(
                     name="MyDataset",
                     zarr_dir="/invalid/zarr",
                     images=image_list,
@@ -120,7 +118,7 @@ def _user_flow_power(
                 proj.id,
                 wf.id,
                 ds.id,
-                applyworkflow=JobCreateV2(),
+                applyworkflow=JobCreate(),
             )
 
@@ -134,14 +132,14 @@ def _user_flow_dataset(
     working_task_id: int,
 ):
     user = _create_user_client(admin, user_identifier="dataset")
-    proj = user.add_project(ProjectCreateV2(name="MyProject_us"))
+    proj = user.add_project(ProjectCreate(name="MyProject_us"))
     image_list = create_image_list(n_images=1000)
     n_datasets = 20
     ds_list = []
     for i in range(n_datasets):
         ds = user.import_dataset(
             proj.id,
-            DatasetImportV2(
+            DatasetImport(
                 name=f"MyDataset_us-{i}",
                 zarr_dir="/invalid/zarr",
                 images=image_list,
@@ -152,17 +150,17 @@ def _user_flow_dataset(
     num_workflows = 20
     for i in range(num_workflows):
         wf = user.add_workflow(
-            proj.id, WorkflowCreateV2(name=f"MyWorkflow_us-{i}")
+            proj.id, WorkflowCreate(name=f"MyWorkflow_us-{i}")
         )
         user.add_workflowtask(
-            proj.id, wf.id, working_task_id, WorkflowTaskCreateV2()
+            proj.id, wf.id, working_task_id, WorkflowTaskCreate()
         )
         for ds in ds_list:
             user.submit_job(
                 proj.id,
                 wf.id,
                 ds.id,
-                applyworkflow=JobCreateV2(),
+                applyworkflow=JobCreate(),
             )
 
@@ -180,27 +178,27 @@ def _user_flow_project(
     num_jobs_per_workflow = 5
     image_list = create_image_list(100)
     for i in range(n_projects):
-        proj = user.add_project(ProjectCreateV2(name=f"MyProject_upj-{i}"))
+        proj = user.add_project(ProjectCreate(name=f"MyProject_upj-{i}"))
         ds = user.import_dataset(
             proj.id,
-            DatasetImportV2(
+            DatasetImport(
                 name=f"MyDataset_up-{i}",
                 zarr_dir="/invalid/zarr",
                 images=image_list,
             ),
         )
         wf = user.add_workflow(
-            proj.id, WorkflowCreateV2(name=f"MyWorkflow_up-{i}")
+            proj.id, WorkflowCreate(name=f"MyWorkflow_up-{i}")
         )
         user.add_workflowtask(
-            proj.id, wf.id, working_task_id, WorkflowTaskCreateV2()
+            proj.id, wf.id, working_task_id, WorkflowTaskCreate()
         )
         for i in range(num_jobs_per_workflow):
             user.submit_job(
                 proj.id,
                 wf.id,
                 ds.id,
-                applyworkflow=JobCreateV2(),
+                applyworkflow=JobCreate(),
             )
 
@@ -214,21 +212,19 @@ def _user_flow_job(
     working_task_id: int,
 ):
     user = _create_user_client(admin, user_identifier="job")
-    proj = user.add_project(ProjectCreateV2(name="MyProject_uj"))
+    proj = user.add_project(ProjectCreate(name="MyProject_uj"))
     image_list = create_image_list(n_images=10)
     ds = user.import_dataset(
         proj.id,
-        DatasetImportV2(
+        DatasetImport(
             name="MyDataset", zarr_dir="/invalid/zarr", images=image_list
         ),
     )
-    wf = user.add_workflow(proj.id, WorkflowCreateV2(name="MyWorkflow_uj"))
-    user.add_workflowtask(
-        proj.id, wf.id, working_task_id, WorkflowTaskCreateV2()
-    )
+    wf = user.add_workflow(proj.id, WorkflowCreate(name="MyWorkflow_uj"))
+    user.add_workflowtask(proj.id, wf.id, working_task_id, WorkflowTaskCreate())
     num_jobs_per_workflow = 100
     for i in range(num_jobs_per_workflow):
-        user.submit_job(proj.id, wf.id, ds.id, applyworkflow=JobCreateV2())
+        user.submit_job(proj.id, wf.id, ds.id, applyworkflow=JobCreate())
 
 
 if __name__ == "__main__":
diff --git a/scripts/validate_db_data_with_read_schemas.py b/scripts/validate_db_data_with_read_schemas.py
index cf5625af3f..d1bd4e47ee 100644
--- a/scripts/validate_db_data_with_read_schemas.py
+++ b/scripts/validate_db_data_with_read_schemas.py
@@ -13,14 +13,14 @@
 from fractal_server.app.models.v2 import WorkflowV2
 from fractal_server.app.schemas.user import UserRead
 from fractal_server.app.schemas.user_group import UserGroupRead
-from fractal_server.app.schemas.v2 import DatasetReadV2
-from fractal_server.app.schemas.v2 import JobReadV2
-from fractal_server.app.schemas.v2 import ProjectReadV2
-from fractal_server.app.schemas.v2 import TaskGroupActivityV2Read
-from fractal_server.app.schemas.v2 import TaskGroupReadV2
-from fractal_server.app.schemas.v2 import TaskReadV2
-from fractal_server.app.schemas.v2 import WorkflowReadV2
-from fractal_server.app.schemas.v2 import WorkflowTaskReadV2
+from fractal_server.app.schemas.v2 import DatasetRead
+from fractal_server.app.schemas.v2 import JobRead
+from fractal_server.app.schemas.v2 import ProjectRead
+from fractal_server.app.schemas.v2 import TaskGroupActivityRead
+from fractal_server.app.schemas.v2 import TaskGroupRead
+from fractal_server.app.schemas.v2 import TaskRead
+from fractal_server.app.schemas.v2 import WorkflowRead
+from fractal_server.app.schemas.v2 import WorkflowTaskRead
 from fractal_server.config import get_settings
 from fractal_server.syringe import Inject
 
@@ -73,14 +73,14 @@
     stm = select(ProjectV2)
     projects = db.execute(stm).scalars().all()
     for project in sorted(projects, key=lambda x: x.id):
-        ProjectReadV2(**project.model_dump())
+        ProjectRead(**project.model_dump())
         print(f"V2 - Project {project.id} validated")
 
     # TASKS V2
     stm = select(TaskV2)
     tasks = db.execute(stm).scalars().all()
     for task in sorted(tasks, key=lambda x: x.id):
-        TaskReadV2(**task.model_dump())
+        TaskRead(**task.model_dump())
         print(f"V2 - Task {task.id} validated")
 
     # TASK GROUPS V2
@@ -89,15 +89,15 @@
     for task_group in sorted(task_groups, key=lambda x: x.id):
         task_list = []
         for task in task_group.task_list:
-            task_list.append(TaskReadV2(**task.model_dump()))
-        TaskGroupReadV2(**task_group.model_dump(), task_list=task_list)
+            task_list.append(TaskRead(**task.model_dump()))
+        TaskGroupRead(**task_group.model_dump(), task_list=task_list)
         print(f"V2 - TaskGroup {task_group.id} validated")
 
     # TASK GROUP V2 ACTIVITIES
     stm = select(TaskGroupActivityV2)
     task_group_activities = db.execute(stm).scalars().all()
     for activity in sorted(task_group_activities, key=lambda x: x.id):
-        TaskGroupActivityV2Read(**activity.model_dump())
+        TaskGroupActivityRead(**activity.model_dump())
         print(f"V2 - TaskGroupActivity {activity.id} validated")
 
     # WORKFLOWS V2
@@ -108,15 +108,15 @@
         task_list = []
         for wftask in workflow.task_list:
             task_list.append(
-                WorkflowTaskReadV2(
+                WorkflowTaskRead(
                     **wftask.model_dump(),
-                    task=TaskReadV2(**wftask.task.model_dump()),
+                    task=TaskRead(**wftask.task.model_dump()),
                 )
             )
-        WorkflowReadV2(
+        WorkflowRead(
             **workflow.model_dump(),
-            project=ProjectReadV2(**workflow.project.model_dump()),
+            project=ProjectRead(**workflow.project.model_dump()),
             task_list=task_list,
         )
         print(f"V2 - Workflow {workflow.id} validated")
 
@@ -125,9 +125,9 @@
     stm = select(DatasetV2)
     datasets = db.execute(stm).scalars().all()
     for dataset in sorted(datasets, key=lambda x: x.id):
-        DatasetReadV2(
+        DatasetRead(
             **dataset.model_dump(),
-            project=ProjectReadV2(**dataset.project.model_dump()),
+            project=ProjectRead(**dataset.project.model_dump()),
         )
         print(f"V2 - Dataset {dataset.id} validated")
 
@@ -135,5 +135,5 @@
     stm = select(JobV2)
     jobs = db.execute(stm).scalars().all()
     for job in sorted(jobs, key=lambda x: x.id):
-        JobReadV2(**job.model_dump())
+        JobRead(**job.model_dump())
         print(f"V2 - Job {job.id} validated")
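A rename of this breadth is easy to leave half-done. In the spirit of the validation script above, a small audit script (hypothetical helper, not part of this patch) can list identifiers under the schemas package that still carry the dropped suffix; hits then need a human eye, since model names such as `TaskGroupV2` keep the suffix on purpose and may legitimately appear in imports:

    import pathlib
    import re

    # Match CamelCase identifiers that still embed the "V2" marker.
    PATTERN = re.compile(r"\b[A-Za-z_]*V2[A-Za-z_]*\b")

    for path in pathlib.Path("fractal_server/app/schemas").rglob("*.py"):
        for lineno, line in enumerate(path.read_text().splitlines(), start=1):
            for match in PATTERN.finditer(line):
                print(f"{path}:{lineno}: {match.group(0)}")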
diff --git a/tests/fixtures_tasks_v2.py b/tests/fixtures_tasks_v2.py
index a4d043fa08..c396dd3291 100644
--- a/tests/fixtures_tasks_v2.py
+++ b/tests/fixtures_tasks_v2.py
@@ -17,9 +17,9 @@
 from fractal_server.app.models.v2 import Resource
 from fractal_server.app.models.v2 import TaskGroupV2
 from fractal_server.app.models.v2 import TaskV2
-from fractal_server.app.schemas.v2 import ManifestV2
-from fractal_server.app.schemas.v2 import TaskCreateV2
-from fractal_server.app.schemas.v2 import TaskGroupCreateV2
+from fractal_server.app.schemas.v2 import Manifest
+from fractal_server.app.schemas.v2 import TaskCreate
+from fractal_server.app.schemas.v2 import TaskGroupCreate
 from fractal_server.tasks.v2.utils_background import prepare_tasks_metadata
 from fractal_server.tasks.v2.utils_database import (
     create_db_tasks_and_update_task_group_sync,
@@ -70,8 +70,8 @@ def fractal_tasks_mock_collection(
     with open(package_root / "__FRACTAL_MANIFEST__.json") as f:
         manifest_dict = json.load(f)
 
-    manifest = ManifestV2(**manifest_dict)
-    task_list: list[TaskCreateV2] = prepare_tasks_metadata(
+    manifest = Manifest(**manifest_dict)
+    task_list: list[TaskCreate] = prepare_tasks_metadata(
         package_manifest=manifest,
         python_bin=venv_python,
         package_root=package_root,
@@ -99,7 +99,7 @@ def fractal_tasks_mock_db(
     )
     resource_id = res.scalar_one()
 
-    task_group_obj = TaskGroupCreateV2(
+    task_group_obj = TaskGroupCreate(
         origin="other",
         pkg_name="fractal_tasks_mock",
         user_id=first_user.id,
diff --git a/tests/v2/test_01_schemas/test_schemas_dataset.py b/tests/v2/test_01_schemas/test_schemas_dataset.py
index e2d97623d2..0df66f1d3d 100644
--- a/tests/v2/test_01_schemas/test_schemas_dataset.py
+++ b/tests/v2/test_01_schemas/test_schemas_dataset.py
@@ -3,10 +3,10 @@
 from fractal_server.app.models.v2 import DatasetV2
 from fractal_server.app.models.v2 import ProjectV2
-from fractal_server.app.schemas.v2 import DatasetCreateV2
-from fractal_server.app.schemas.v2 import DatasetImportV2
-from fractal_server.app.schemas.v2 import DatasetReadV2
-from fractal_server.app.schemas.v2 import DatasetUpdateV2
+from fractal_server.app.schemas.v2 import DatasetCreate
+from fractal_server.app.schemas.v2 import DatasetImport
+from fractal_server.app.schemas.v2 import DatasetRead
+from fractal_server.app.schemas.v2 import DatasetUpdate
 from fractal_server.urls import normalize_url
 
@@ -14,18 +14,18 @@ async def test_schemas_dataset_v2():
     project = ProjectV2(id=1, name="project")
 
     # Test zarr_dir=None is valid
-    DatasetCreateV2(name="name", zarr_dir=None)
+    DatasetCreate(name="name", zarr_dir=None)
 
-    dataset_create = DatasetCreateV2(
+    dataset_create = DatasetCreate(
         name="name",
         zarr_dir="/tmp/",
     )
     assert dataset_create.zarr_dir == normalize_url(dataset_create.zarr_dir)
 
     with pytest.raises(ValidationError):
-        DatasetImportV2(name="name", zarr_dir=None)
+        DatasetImport(name="name", zarr_dir=None)
 
-    dataset_import = DatasetImportV2(
+    dataset_import = DatasetImport(
         name="name",
         zarr_dir="/tmp/",
         images=[{"zarr_url": "/tmp/image/"}],
@@ -41,16 +41,16 @@ async def test_schemas_dataset_v2():
 
     # Read
-    DatasetReadV2(**dataset.model_dump(), project=project.model_dump())
+    DatasetRead(**dataset.model_dump(), project=project.model_dump())
 
     # Update
     # validation accepts `zarr_dir` as None, but not `name`
-    DatasetUpdateV2(zarr_dir=None)
+    DatasetUpdate(zarr_dir=None)
     with pytest.raises(ValidationError):
-        DatasetUpdateV2(name=None)
+        DatasetUpdate(name=None)
 
-    dataset_update = DatasetUpdateV2(name="new name", zarr_dir="/zarr/")
+    dataset_update = DatasetUpdate(name="new name", zarr_dir="/zarr/")
     assert not dataset_update.zarr_dir.endswith("/")
 
     for key, value in dataset_update.model_dump(exclude_unset=True).items():
@@ -60,19 +60,19 @@
 
 def test_zarr_dir():
-    DatasetCreateV2(name="foo", zarr_dir="/")
+    DatasetCreate(name="foo", zarr_dir="/")
 
     assert (
-        DatasetCreateV2(name="foo", zarr_dir="/foo/bar").zarr_dir
-        == DatasetCreateV2(name="foo", zarr_dir=" /foo/bar").zarr_dir
-        == DatasetCreateV2(name="foo", zarr_dir="/foo/bar ").zarr_dir
+        DatasetCreate(name="foo", zarr_dir="/foo/bar").zarr_dir
+        == DatasetCreate(name="foo", zarr_dir=" /foo/bar").zarr_dir
+        == DatasetCreate(name="foo", zarr_dir="/foo/bar ").zarr_dir
         == "/foo/bar"
     )
     assert (
-        DatasetCreateV2(name="foo", zarr_dir=" / foo bar ").zarr_dir
+        DatasetCreate(name="foo", zarr_dir=" / foo bar ").zarr_dir
         == "/ foo bar"
     )
 
     with pytest.raises(ValidationError):
-        DatasetCreateV2(name="foo", zarr_dir="not/absolute")
+        DatasetCreate(name="foo", zarr_dir="not/absolute")
 
-    DatasetCreateV2(name="foo", zarr_dir="/#special/chars")
+    DatasetCreate(name="foo", zarr_dir="/#special/chars")
diff --git a/tests/v2/test_01_schemas/test_schemas_manifest.py b/tests/v2/test_01_schemas/test_schemas_manifest.py
index aaa2b6f3a9..3bfa5bb67d 100644
--- a/tests/v2/test_01_schemas/test_schemas_manifest.py
+++ b/tests/v2/test_01_schemas/test_schemas_manifest.py
@@ -1,8 +1,8 @@
 import pytest
 from pydantic import ValidationError
 
-from fractal_server.app.schemas.v2.manifest import ManifestV2
-from fractal_server.app.schemas.v2.manifest import TaskManifestV2
+from fractal_server.app.schemas.v2.manifest import Manifest
+from fractal_server.app.schemas.v2.manifest import TaskManifest
 
 
 def msg(e: pytest.ExceptionInfo) -> str:
@@ -10,19 +10,19 @@ def msg(e: pytest.ExceptionInfo) -> str:
 
 def test_TaskManifestV2():
-    assert TaskManifestV2(name="task", executable_parallel="exec")
-    assert TaskManifestV2(name="task", executable_non_parallel="exec")
-    assert TaskManifestV2(
+    assert TaskManifest(name="task", executable_parallel="exec")
+    assert TaskManifest(name="task", executable_non_parallel="exec")
+    assert TaskManifest(
         name="task", executable_parallel="exec", executable_non_parallel="exec"
     )
 
     # 1: no executable
     with pytest.raises(ValidationError):
-        TaskManifestV2(name="task")
+        TaskManifest(name="task")
 
     # 2: parallel with non_parallel meta
     with pytest.raises(ValidationError) as e:
-        TaskManifestV2(
+        TaskManifest(
             name="task",
             executable_parallel="exec",
             meta_non_parallel={"a": "b"},
@@ -31,7 +31,7 @@
 
     # 3: parallel with non_parallel args_schema
     with pytest.raises(ValidationError) as e:
-        TaskManifestV2(
+        TaskManifest(
             name="task",
             executable_parallel="exec",
             args_schema_non_parallel={"a": "b"},
@@ -40,7 +40,7 @@
 
     # 4: non_parallel with parallel meta
     with pytest.raises(ValidationError) as e:
-        TaskManifestV2(
+        TaskManifest(
             name="task",
             executable_non_parallel="exec",
             meta_parallel={"a": "b"},
@@ -49,7 +49,7 @@
 
     # 5: non_parallel with parallel args_schema
     with pytest.raises(ValidationError) as e:
-        TaskManifestV2(
+        TaskManifest(
             name="task",
             executable_non_parallel="exec",
             args_schema_parallel={"a": "b"},
@@ -61,12 +61,12 @@
         ValidationError,
         match="Input should be a valid URL",
     ):
-        TaskManifestV2(
+        TaskManifest(
             name="task",
             executable_parallel="exec",
             docs_link="not-an-url",
         )
-    TaskManifestV2(
+    TaskManifest(
         name="task",
         executable_parallel="exec",
         docs_link="https://url.com",
@@ -74,52 +74,50 @@
 
 def test_ManifestV2():
-    assert ManifestV2(manifest_version="2", task_list=[])
+    assert Manifest(manifest_version="2", task_list=[])
 
-    compound_both_schemas = TaskManifestV2(
+    compound_both_schemas = TaskManifest(
         name="task1",
         executable_parallel="exec",
         args_schema_parallel={"a": "b"},
         executable_non_parallel="exec",
         args_schema_non_parallel={"a": "b"},
     )
-    compound_just_parallel_schemas = TaskManifestV2(
+    compound_just_parallel_schemas = TaskManifest(
         name="task2",
         executable_parallel="exec",
         args_schema_parallel={"a": "b"},
         executable_non_parallel="exec",
     )
-    compound_just_non_parallel_schemas = TaskManifestV2(
+    compound_just_non_parallel_schemas = TaskManifest(
         name="task3",
         executable_parallel="exec",
         executable_non_parallel="exec",
         args_schema_non_parallel={"a": "b"},
     )
-    compound_no_schemas = TaskManifestV2(
+    compound_no_schemas = TaskManifest(
         name="task4",
         executable_parallel="exec",
         executable_non_parallel="exec",
     )
 
-    parallel_schema = TaskManifestV2(
+    parallel_schema = TaskManifest(
         name="task5",
         executable_parallel="exec",
         args_schema_parallel={"a": "b"},
     )
-    parallel_no_schema = TaskManifestV2(
-        name="task6", executable_parallel="exec"
-    )
+    parallel_no_schema = TaskManifest(name="task6", executable_parallel="exec")
 
-    non_parallel_schema = TaskManifestV2(
+    non_parallel_schema = TaskManifest(
         name="task7",
         executable_non_parallel="exec",
         args_schema_non_parallel={"a": "b"},
     )
-    non_parallel_no_schema = TaskManifestV2(
+    non_parallel_no_schema = TaskManifest(
         name="task8", executable_non_parallel="exec"
     )
 
-    assert ManifestV2(
+    assert Manifest(
         manifest_version="2",
         has_args_schemas=True,
         task_list=[
@@ -131,12 +129,12 @@ def test_ManifestV2():
 
     # 1: invalid manifest_version
     with pytest.raises(ValidationError) as exc_info:
-        ManifestV2(manifest_version="1", task_list=[])
+        Manifest(manifest_version="1", task_list=[])
     print(exc_info.value)
 
     # 2: compound_just_parallel_schemas
     with pytest.raises(ValidationError) as e:
-        ManifestV2(
+        Manifest(
             manifest_version="2",
             has_args_schemas=True,
             task_list=[
@@ -149,7 +147,7 @@
 
     # 3: compound_just_parallel_schemas
     with pytest.raises(ValidationError) as e:
-        ManifestV2(
+        Manifest(
             manifest_version="2",
             has_args_schemas=True,
             task_list=[
@@ -162,7 +160,7 @@
 
     # 4: compound_no_schemas
     with pytest.raises(ValidationError) as e:
-        ManifestV2(
+        Manifest(
             manifest_version="2",
             has_args_schemas=True,
             task_list=[
@@ -175,7 +173,7 @@
 
     # 5: parallel_no_schema
     with pytest.raises(ValidationError) as e:
-        ManifestV2(
+        Manifest(
             manifest_version="2",
             has_args_schemas=True,
             task_list=[
@@ -188,7 +186,7 @@
 
     # 6: non_parallel_no_schema
     with pytest.raises(ValidationError) as e:
-        ManifestV2(
+        Manifest(
             manifest_version="2",
             has_args_schemas=True,
             task_list=[
@@ -201,7 +199,7 @@
 
     # 7: Non-unique task names
     with pytest.raises(ValidationError) as e:
-        ManifestV2(
+        Manifest(
             manifest_version="2",
             has_args_schemas=True,
             task_list=[
diff --git a/tests/v2/test_01_schemas/test_schemas_workflow.py b/tests/v2/test_01_schemas/test_schemas_workflow.py
index a87cde0daa..72168c69ce 100644
--- a/tests/v2/test_01_schemas/test_schemas_workflow.py
+++ b/tests/v2/test_01_schemas/test_schemas_workflow.py
@@ -3,11 +3,11 @@
 from fractal_server.app.models.v2 import ProjectV2
 from fractal_server.app.models.v2 import WorkflowV2
-from fractal_server.app.schemas.v2 import WorkflowCreateV2
-from fractal_server.app.schemas.v2 import WorkflowReadV2
-from fractal_server.app.schemas.v2 import WorkflowTaskCreateV2
-from fractal_server.app.schemas.v2 import WorkflowTaskUpdateV2
-from fractal_server.app.schemas.v2 import WorkflowUpdateV2
+from fractal_server.app.schemas.v2 import WorkflowCreate
+from fractal_server.app.schemas.v2 import WorkflowRead
+from fractal_server.app.schemas.v2 import WorkflowTaskCreate
+from fractal_server.app.schemas.v2 import WorkflowTaskUpdate
+from fractal_server.app.schemas.v2 import WorkflowUpdate
 
 
 async def test_schemas_workflow_v2():
@@ -15,7 +15,7 @@ async def test_schemas_workflow_v2():
 
     # Create
-    workflow_create = WorkflowCreateV2(name="workflow")
+    workflow_create = WorkflowCreate(name="workflow")
 
     workflow = WorkflowV2(
         **workflow_create.model_dump(),
@@ -25,7 +25,7 @@ async def test_schemas_workflow_v2():
 
     # Read
-    WorkflowReadV2(
+    WorkflowRead(
         **workflow.model_dump(),
         project=project.model_dump(),
         task_list=workflow.task_list,
@@ -34,28 +34,28 @@ async def test_schemas_workflow_v2():
 
     # Update
     with pytest.raises(ValidationError):
-        WorkflowUpdateV2(name=None)
+        WorkflowUpdate(name=None)
 
     with pytest.raises(ValidationError):
-        WorkflowUpdateV2(name="foo", reordered_workflowtask_ids=[1, 2, -3])
+        WorkflowUpdate(name="foo", reordered_workflowtask_ids=[1, 2, -3])
 
-    WorkflowUpdateV2(name="new name", reordered_workflowtask_ids=[1, 2, 3])
+    WorkflowUpdate(name="new name", reordered_workflowtask_ids=[1, 2, 3])
 
 
 async def test_schemas_workflow_task_v2():
     for attribute in ("args_parallel", "args_non_parallel"):
-        WorkflowTaskCreateV2(**{attribute: dict(something="else")})
+        WorkflowTaskCreate(**{attribute: dict(something="else")})
 
-        WorkflowTaskUpdateV2(**{attribute: dict(something="else")})
+        WorkflowTaskUpdate(**{attribute: dict(something="else")})
 
-        WorkflowTaskCreateV2(**{attribute: None})
+        WorkflowTaskCreate(**{attribute: None})
 
-        WorkflowTaskUpdateV2(**{attribute: None})
+        WorkflowTaskUpdate(**{attribute: None})
 
         with pytest.raises(ValidationError) as e:
-            WorkflowTaskUpdateV2(**{attribute: dict(zarr_url="/something")})
+            WorkflowTaskUpdate(**{attribute: dict(zarr_url="/something")})
         assert "contains the following forbidden keys" in str(e.value)
 
         with pytest.raises(ValidationError) as e:
-            WorkflowTaskCreateV2(**{attribute: dict(zarr_url="/something")})
+            WorkflowTaskCreate(**{attribute: dict(zarr_url="/something")})
         assert "contains the following forbidden keys" in str(e.value)
diff --git a/tests/v2/test_01_schemas/test_task_collection.py b/tests/v2/test_01_schemas/test_task_collection.py
index f8e3c9370e..422cbec185 100644
--- a/tests/v2/test_01_schemas/test_task_collection.py
+++ b/tests/v2/test_01_schemas/test_task_collection.py
@@ -3,64 +3,64 @@
 import pytest
 from pydantic import ValidationError
 
-from fractal_server.app.schemas.v2 import ManifestV2
-from fractal_server.app.schemas.v2 import TaskCollectCustomV2
-from fractal_server.app.schemas.v2 import TaskCollectPipV2
-from fractal_server.app.schemas.v2 import TaskGroupCreateV2Strict
-from fractal_server.app.schemas.v2 import TaskGroupV2OriginEnum
+from fractal_server.app.schemas.v2 import Manifest
+from fractal_server.app.schemas.v2 import TaskCollectCustom
+from fractal_server.app.schemas.v2 import TaskCollectPip
+from fractal_server.app.schemas.v2 import TaskGroupCreateStrict
+from fractal_server.app.schemas.v2 import TaskGroupOriginEnum
 
 
 def test_TaskCollectPipV2():
     """
     Check that leading/trailing whitespace characters were removed
     """
-    collection = TaskCollectPipV2(
+    collection = TaskCollectPip(
         package="  package  ",
         package_version="  1.2.3  ",
     )
     assert collection.package == "package"
     assert collection.package_version == "1.2.3"
 
-    collection_none = TaskCollectPipV2(
+    collection_none = TaskCollectPip(
         package="pkg", pinned_package_versions_post=None
     )
     assert collection_none.pinned_package_versions_post is None
 
-    sanitized_keys = TaskCollectPipV2(
+    sanitized_keys = TaskCollectPip(
         package="pkg", pinned_package_versions_post={" a ": "1.0.0"}
     )
     assert sanitized_keys.pinned_package_versions_post == dict(a="1.0.0")
 
     with pytest.raises(ValidationError):
-        TaskCollectPipV2(
+        TaskCollectPip(
             package="pkg",
             pinned_package_versions_post={";maliciouscmd": "1.0.0"},
         )
     with pytest.raises(ValidationError):
-        TaskCollectPipV2(
+        TaskCollectPip(
             package="pkg",
             pinned_package_versions_post={"pkg": ";maliciouscmd"},
         )
     with pytest.raises(ValidationError):
-        TaskCollectPipV2(
+        TaskCollectPip(
            package="pkg",
             pinned_package_versions_post={" a ": "1.0.0", "a": "2.0.0"},
         )
     with pytest.raises(ValidationError):
-        TaskCollectPipV2(
+        TaskCollectPip(
             package="pkg", pinned_package_versions_post={" ": "1.0.0"}
         )
 
     with pytest.raises(ValidationError, match="must not contain"):
-        TaskCollectPipV2(package="; rm x")
+        TaskCollectPip(package="; rm x")
     with pytest.raises(ValidationError, match="must not contain"):
-        TaskCollectPipV2(package="pkg", package_version="; rm x")
+        TaskCollectPip(package="pkg", package_version="; rm x")
     with pytest.raises(ValidationError, match="must not contain"):
-        TaskCollectPipV2(
+        TaskCollectPip(
             package="pkg", package_version="1.2.3", package_extras="]; rm x; ["
         )
@@ -76,8 +76,8 @@ async def test_TaskCollectCustomV2(testdata_path):
         manifest_dict = json.load(f)
 
     with pytest.raises(ValidationError) as e:
-        TaskCollectCustomV2(
-            manifest=ManifestV2(**manifest_dict),
+        TaskCollectCustom(
+            manifest=Manifest(**manifest_dict),
             python_interpreter="/a",
             label="b",
             package_root=None,
@@ -86,8 +86,8 @@ async def test_TaskCollectCustomV2(testdata_path):
     assert "must not contain" in e._excinfo[1].errors()[0]["msg"]
 
     with pytest.raises(ValidationError) as e:
-        TaskCollectCustomV2(
-            manifest=ManifestV2(**manifest_dict),
+        TaskCollectCustom(
+            manifest=Manifest(**manifest_dict),
             python_interpreter="a",
             label="name",
             package_root=None,
@@ -96,8 +96,8 @@
     assert "String must be an absolute path" in str(e.value)
 
     with pytest.raises(ValidationError) as e:
-        TaskCollectCustomV2(
-            manifest=ManifestV2(**manifest_dict),
+        TaskCollectCustom(
+            manifest=Manifest(**manifest_dict),
             python_interpreter="/a",
             label="name",
             package_root="non_absolute_path",
@@ -107,8 +107,8 @@
 
     # Fail because neither 'package_root' nor 'package_name'
     with pytest.raises(ValidationError) as e:
-        TaskCollectCustomV2(
-            manifest=ManifestV2(**manifest_dict),
+        TaskCollectCustom(
+            manifest=Manifest(**manifest_dict),
             python_interpreter="/a",
             label="name",
             package_root=None,
@@ -118,8 +118,8 @@
     assert "One and only one must be set" in str(e.value)
 
     # Successful
-    collection = TaskCollectCustomV2(
-        manifest=ManifestV2(**manifest_dict),
+    collection = TaskCollectCustom(
+        manifest=Manifest(**manifest_dict),
         python_interpreter=" /some/python ",
         label="b",
         package_root=" /somewhere ",
@@ -132,12 +132,12 @@
 
 def test_TaskGroupCreateV2Strict():
     # Success
-    TaskGroupCreateV2Strict(
+    TaskGroupCreateStrict(
         path="/a",
         venv_path="/b",
         version="/c",
         python_version="/d",
-        origin=TaskGroupV2OriginEnum.WHEELFILE,
+        origin=TaskGroupOriginEnum.WHEELFILE,
         archive_path="/a",
         pkg_name="x",
         user_id=1,
     )
     # Validators from parent class
     with pytest.raises(ValueError, match="absolute path"):
-        TaskGroupCreateV2Strict(
+        TaskGroupCreateStrict(
             path="a",
             venv_path="b",
             version="c",
             python_version="d",
-            origin=TaskGroupV2OriginEnum.PYPI,
+            origin=TaskGroupOriginEnum.PYPI,
             pkg_name="x",
             user_id=1,
             resource_id=1,
         )
     # No path
     with pytest.raises(ValidationError):
-        TaskGroupCreateV2Strict(
+        TaskGroupCreateStrict(
             venv_path="/b",
             version="c",
             python_version="d",
-            origin=TaskGroupV2OriginEnum.WHEELFILE,
+            origin=TaskGroupOriginEnum.WHEELFILE,
             archive_path="/a",
             pkg_name="x",
             user_id=1,
         )
     # No venv_path
     with pytest.raises(ValidationError):
-        TaskGroupCreateV2Strict(
+        TaskGroupCreateStrict(
             path="/a",
             version="c",
             python_version="d",
-            origin=TaskGroupV2OriginEnum.WHEELFILE,
+            origin=TaskGroupOriginEnum.WHEELFILE,
             archive_path="/a",
             pkg_name="x",
             user_id=1,
         )
     # No version
     with pytest.raises(ValidationError):
-        TaskGroupCreateV2Strict(
+        TaskGroupCreateStrict(
             path="/a",
             venv_path="/b",
             python_version="d",
-            origin=TaskGroupV2OriginEnum.WHEELFILE,
+            origin=TaskGroupOriginEnum.WHEELFILE,
             archive_path="/a",
             pkg_name="x",
             user_id=1,
         )
     # No python_version
     with pytest.raises(ValidationError):
-        TaskGroupCreateV2Strict(
+        TaskGroupCreateStrict(
             path="/a",
             venv_path="/b",
             version="c",
-            origin=TaskGroupV2OriginEnum.WHEELFILE,
+            origin=TaskGroupOriginEnum.WHEELFILE,
             archive_path="/a",
             pkg_name="x",
             user_id=1,
diff --git a/tests/v2/test_01_schemas/test_unit_json_schemas_v2.py b/tests/v2/test_01_schemas/test_unit_json_schemas_v2.py
index 1de4b814e3..4db68cc368 100644
--- a/tests/v2/test_01_schemas/test_unit_json_schemas_v2.py
+++ b/tests/v2/test_01_schemas/test_unit_json_schemas_v2.py
@@ -2,7 +2,7 @@
 from pathlib import Path
 
 import fractal_server.app.schemas.v2 as v2
-from fractal_server.app.schemas.v2.manifest import ManifestV2
+from fractal_server.app.schemas.v2.manifest import Manifest
 
 
 def test_ManifestV2_jsonschema():
@@ -15,5 +15,5 @@ def test_ManifestV2_jsonschema():
     )
     with json_schema_path.open("r") as f:
         current_schema = json.load(f)
-    new_schema = ManifestV2.model_json_schema()
+    new_schema = Manifest.model_json_schema()
     assert new_schema == current_schema
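One side effect worth checking against the test above: pydantic derives the JSON-schema `title` from the class name, so dropping the `V2` suffix from `ManifestV2` changes the output of `Manifest.model_json_schema()` unless the title is pinned in the model config. If the stored schema must track the new title, it needs regenerating; a rough sketch (the real path is computed inside the test, the one below is illustrative):

    import json

    from fractal_server.app.schemas.v2.manifest import Manifest

    # Illustrative location; the test derives it from the package path.
    schema_path = "fractal_server/json_schemas/manifest_v2.json"

    with open(schema_path, "w") as f:
        json.dump(Manifest.model_json_schema(), f, indent=2)
        f.write("\n")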
pytest.raises(ValidationError): - TaskCreateV2(**args, input_types={"a": True, " a ": False}) + TaskCreate(**args, input_types={"a": True, " a ": False}) with pytest.raises( ValidationError, match="Task must have at least one valid command" ): - TaskCreateV2(name="name") + TaskCreate(name="name") def test_task_collect_pip(): - TaskCollectPipV2(package="x") - TaskCollectPipV2(package="/tmp/x.whl") + TaskCollectPip(package="x") + TaskCollectPip(package="/tmp/x.whl") def test_task_update(): - t = TaskUpdateV2() + t = TaskUpdate() assert t.input_types is None assert t.output_types is None with pytest.raises(ValidationError): - TaskUpdateV2(input_types=None) + TaskUpdate(input_types=None) with pytest.raises(ValidationError): - TaskUpdateV2(output_types=None) + TaskUpdate(output_types=None) with pytest.raises(ValidationError): - TaskUpdateV2(name="cannot set name") + TaskUpdate(name="cannot set name") def test_job_create(): - JobCreateV2() - JobCreateV2(last_task_index=None) - JobCreateV2(last_task_index=0) - JobCreateV2(last_task_index=1) + JobCreate() + JobCreate(last_task_index=None) + JobCreate(last_task_index=0) + JobCreate(last_task_index=1) with pytest.raises(ValidationError): - JobCreateV2(last_task_index=-1) + JobCreate(last_task_index=-1) def test_workflow_task_dump(): - WorkflowTaskDumpV2( + WorkflowTaskDump( id=1, workflow_id=1, type_filters={}, task_id=1, - task=TaskDumpV2( + task=TaskDump( id=1, name="name", type="parallel", diff --git a/tests/v2/test_02_models/test_tasks_v2.py b/tests/v2/test_02_models/test_tasks_v2.py index 38ba4469bb..2d6a4c36a0 100644 --- a/tests/v2/test_02_models/test_tasks_v2.py +++ b/tests/v2/test_02_models/test_tasks_v2.py @@ -5,7 +5,7 @@ from fractal_server.app.models.v2 import TaskGroupActivityV2 from fractal_server.app.models.v2 import TaskGroupV2 from fractal_server.app.models.v2 import TaskV2 -from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2 +from fractal_server.app.schemas.v2 import TaskGroupActivityStatus async def test_task_group_v2(db, local_resource_profile_db): @@ -132,7 +132,7 @@ async def test_collection_state(db, local_resource_profile_db): task_group_activity = TaskGroupActivityV2( user_id=user.id, taskgroupv2_id=task_group.id, - status=TaskGroupActivityStatusV2.PENDING, + status=TaskGroupActivityStatus.PENDING, action="collect", pkg_name="pkg", version="1.0.0", diff --git a/tests/v2/test_03_api/admin/test_admin_job.py b/tests/v2/test_03_api/admin/test_admin_job.py index ae5e821423..7a7888deca 100644 --- a/tests/v2/test_03_api/admin/test_admin_job.py +++ b/tests/v2/test_03_api/admin/test_admin_job.py @@ -15,7 +15,7 @@ _workflow_insert_task, ) from fractal_server.app.routes.aux._runner import _backend_supports_shutdown -from fractal_server.app.schemas.v2 import JobStatusTypeV2 +from fractal_server.app.schemas.v2 import JobStatusType from fractal_server.app.schemas.v2 import ResourceType from fractal_server.runner.filenames import SHUTDOWN_FILENAME from fractal_server.runner.filenames import WORKFLOW_LOG_FILENAME @@ -257,8 +257,8 @@ async def test_patch_job( db, tmp_path, ): - ORIGINAL_STATUS = JobStatusTypeV2.SUBMITTED - NEW_STATUS = JobStatusTypeV2.FAILED + ORIGINAL_STATUS = JobStatusType.SUBMITTED + NEW_STATUS = JobStatusType.FAILED async with MockCurrentUser() as user: project = await project_factory_v2(user) @@ -282,7 +282,7 @@ async def test_patch_job( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, - status=JobStatusTypeV2.DONE, + status=JobStatusType.DONE, ) hr = HistoryRun( @@ -430,7 +430,7 @@ 
async def test_stop_job_slurm( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, - status=JobStatusTypeV2.SUBMITTED, + status=JobStatusType.SUBMITTED, user_email="fake@example.org", dataset_dump={}, workflow_dump={}, diff --git a/tests/v2/test_03_api/admin/test_admin_taskgroup.py b/tests/v2/test_03_api/admin/test_admin_taskgroup.py index 75922b9255..5206bec4b0 100644 --- a/tests/v2/test_03_api/admin/test_admin_taskgroup.py +++ b/tests/v2/test_03_api/admin/test_admin_taskgroup.py @@ -10,10 +10,10 @@ from fractal_server.app.routes.api.v2._aux_functions import ( _workflow_insert_task, ) -from fractal_server.app.schemas.v2 import JobStatusTypeV2 +from fractal_server.app.schemas.v2 import JobStatusType from fractal_server.app.schemas.v2 import ResourceType -from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2 -from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2 +from fractal_server.app.schemas.v2 import TaskGroupActivityAction +from fractal_server.app.schemas.v2 import TaskGroupActivityStatus PREFIX = "/admin/v2" @@ -202,15 +202,15 @@ async def test_get_task_group_activity( user_id=user1.id, pkg_name="foo", version="1", - status=TaskGroupActivityStatusV2.OK, - action=TaskGroupActivityActionV2.COLLECT, + status=TaskGroupActivityStatus.OK, + action=TaskGroupActivityAction.COLLECT, ) activity2 = TaskGroupActivityV2( user_id=user1.id, pkg_name="bar", version="1", - status=TaskGroupActivityStatusV2.OK, - action=TaskGroupActivityActionV2.REACTIVATE, + status=TaskGroupActivityStatus.OK, + action=TaskGroupActivityAction.REACTIVATE, ) async with MockCurrentUser() as user2: task = await task_factory_v2(user_id=user2.id) @@ -218,16 +218,16 @@ async def test_get_task_group_activity( user_id=user2.id, pkg_name="foo", version="2", - status=TaskGroupActivityStatusV2.FAILED, - action=TaskGroupActivityActionV2.COLLECT, + status=TaskGroupActivityStatus.FAILED, + action=TaskGroupActivityAction.COLLECT, taskgroupv2_id=task.taskgroupv2_id, ) activity4 = TaskGroupActivityV2( user_id=user2.id, pkg_name="foo", version="1", - status=TaskGroupActivityStatusV2.OK, - action=TaskGroupActivityActionV2.COLLECT, + status=TaskGroupActivityStatus.OK, + action=TaskGroupActivityAction.COLLECT, taskgroupv2_id=task.taskgroupv2_id, ) @@ -388,8 +388,8 @@ async def test_admin_deactivate_task_group_api( activity = res.json() task_group_other = await db.get(TaskGroupV2, task_other.taskgroupv2_id) assert activity["version"] == "N/A" - assert activity["status"] == TaskGroupActivityStatusV2.OK - assert activity["action"] == TaskGroupActivityActionV2.DEACTIVATE + assert activity["status"] == TaskGroupActivityStatus.OK + assert activity["action"] == TaskGroupActivityAction.DEACTIVATE assert activity["timestamp_started"] is not None assert activity["timestamp_ended"] is not None assert task_group_other.active is False @@ -406,8 +406,8 @@ async def test_admin_deactivate_task_group_api( task_group_pypi = await db.get(TaskGroupV2, task_pypi.taskgroupv2_id) activity_id = activity["id"] assert activity["version"] == task_group_pypi.version - assert activity["status"] == TaskGroupActivityStatusV2.PENDING - assert activity["action"] == TaskGroupActivityActionV2.DEACTIVATE + assert activity["status"] == TaskGroupActivityStatus.PENDING + assert activity["action"] == TaskGroupActivityAction.DEACTIVATE assert activity["timestamp_started"] is not None assert activity["timestamp_ended"] is None @@ -487,8 +487,8 @@ async def test_reactivate_task_group_api( activity = res.json() assert res.status_code == 
202 assert activity["version"] == "N/A" - assert activity["status"] == TaskGroupActivityStatusV2.OK - assert activity["action"] == TaskGroupActivityActionV2.REACTIVATE + assert activity["status"] == TaskGroupActivityStatus.OK + assert activity["action"] == TaskGroupActivityAction.REACTIVATE assert activity["timestamp_started"] is not None assert activity["timestamp_ended"] is not None task_group_other = await db.get(TaskGroupV2, task_other.taskgroupv2_id) @@ -517,8 +517,8 @@ async def test_reactivate_task_group_api( activity_id = activity["id"] assert res.status_code == 202 assert activity["version"] == task_group_pypi.version - assert activity["status"] == TaskGroupActivityStatusV2.PENDING - assert activity["action"] == TaskGroupActivityActionV2.REACTIVATE + assert activity["status"] == TaskGroupActivityStatus.PENDING + assert activity["action"] == TaskGroupActivityAction.REACTIVATE assert activity["timestamp_started"] is not None assert activity["timestamp_ended"] is None await db.refresh(task_group_pypi) @@ -567,7 +567,7 @@ async def test_lifecycle_actions_with_submitted_jobs( dataset_dump={}, workflow_dump={}, project_dump={}, - status=JobStatusTypeV2.SUBMITTED, + status=JobStatusType.SUBMITTED, first_task_index=0, last_task_index=1, ) @@ -611,15 +611,15 @@ async def test_admin_delete_task_group_api_local( assert res.status_code == 202 activity = res.json() activity_id = activity["id"] - assert activity["action"] == TaskGroupActivityActionV2.DELETE - assert activity["status"] == TaskGroupActivityStatusV2.PENDING + assert activity["action"] == TaskGroupActivityAction.DELETE + assert activity["status"] == TaskGroupActivityStatus.PENDING res = await client.get(f"{PREFIX}/task-group/activity/?action=delete") assert len(res.json()["items"]) == 1 activity = res.json()["items"][0] assert activity["id"] == activity_id - assert activity["action"] == TaskGroupActivityActionV2.DELETE - assert activity["status"] == TaskGroupActivityStatusV2.OK + assert activity["action"] == TaskGroupActivityAction.DELETE + assert activity["status"] == TaskGroupActivityStatus.OK @pytest.mark.container @@ -647,12 +647,12 @@ async def test_admin_delete_task_group_api_ssh( assert res.status_code == 202 activity = res.json() activity_id = activity["id"] - assert activity["action"] == TaskGroupActivityActionV2.DELETE - assert activity["status"] == TaskGroupActivityStatusV2.PENDING + assert activity["action"] == TaskGroupActivityAction.DELETE + assert activity["status"] == TaskGroupActivityStatus.PENDING res = await client.get(f"{PREFIX}/task-group/activity/?action=delete") assert len(res.json()["items"]) == 1 activity = res.json()["items"][0] assert activity["id"] == activity_id - assert activity["action"] == TaskGroupActivityActionV2.DELETE - assert activity["status"] == TaskGroupActivityStatusV2.OK + assert activity["action"] == TaskGroupActivityAction.DELETE + assert activity["status"] == TaskGroupActivityStatus.OK diff --git a/tests/v2/test_03_api/test_api_dataset.py b/tests/v2/test_03_api/test_api_dataset.py index c28a8eb5a4..6d9dfcd49b 100644 --- a/tests/v2/test_03_api/test_api_dataset.py +++ b/tests/v2/test_03_api/test_api_dataset.py @@ -3,8 +3,8 @@ from fractal_server.app.routes.api.v2._aux_functions import ( _workflow_insert_task, ) -from fractal_server.app.schemas.v2 import JobStatusTypeV2 -from fractal_server.app.schemas.v2.dataset import DatasetExportV2 +from fractal_server.app.schemas.v2 import JobStatusType +from fractal_server.app.schemas.v2.dataset import DatasetExport from fractal_server.images import 
SingleImage from fractal_server.string_tools import sanitize_string from fractal_server.urls import normalize_url @@ -280,7 +280,7 @@ async def test_delete_dataset_cascade_jobs( workflow_id=workflow.id, dataset_id=dataset.id, working_dir=(tmp_path / "some_working_dir").as_posix(), - status=JobStatusTypeV2.DONE, + status=JobStatusType.DONE, ) assert job.dataset_id == dataset.id @@ -308,17 +308,17 @@ async def test_delete_dataset_cascade_jobs( } j1 = await job_factory_v2( dataset_id=ds_deletable.id, - status=JobStatusTypeV2.DONE, + status=JobStatusType.DONE, **common_args, ) j2 = await job_factory_v2( dataset_id=ds_deletable.id, - status=JobStatusTypeV2.FAILED, + status=JobStatusType.FAILED, **common_args, ) await job_factory_v2( dataset_id=ds_not_deletable.id, - status=JobStatusTypeV2.SUBMITTED, # reason why ds is not deletable + status=JobStatusType.SUBMITTED, # reason why ds is not deletable **common_args, ) res = await client.delete( @@ -484,6 +484,4 @@ async def test_export_dataset( f"/api/v2/project/{project.id}/dataset/{dataset.id}/export/" ) assert res.status_code == 200 - assert ( - res.json() == DatasetExportV2(**dataset.model_dump()).model_dump() - ) + assert res.json() == DatasetExport(**dataset.model_dump()).model_dump() diff --git a/tests/v2/test_03_api/test_api_history.py b/tests/v2/test_03_api/test_api_history.py index 79a0882d87..decc688097 100644 --- a/tests/v2/test_03_api/test_api_history.py +++ b/tests/v2/test_03_api/test_api_history.py @@ -3,7 +3,7 @@ from fractal_server.app.models import TaskGroupV2 from fractal_server.app.models import TaskV2 from fractal_server.app.schemas.v2 import HistoryUnitStatus -from fractal_server.app.schemas.v2 import JobStatusTypeV2 +from fractal_server.app.schemas.v2 import JobStatusType async def test_get_workflow_tasks_statuses( @@ -84,13 +84,13 @@ async def test_get_workflow_tasks_statuses( job_A = JobV2( first_task_index=0, last_task_index=3, - status=JobStatusTypeV2.DONE, + status=JobStatusType.DONE, **common_job_args, ) job_B = JobV2( first_task_index=1, last_task_index=4, - status=JobStatusTypeV2.SUBMITTED, + status=JobStatusType.SUBMITTED, **common_job_args, ) db.add_all([job_A, job_B]) @@ -212,7 +212,7 @@ async def test_multiple_jobs_error( project_dump={}, first_task_index=0, last_task_index=0, - status=JobStatusTypeV2.SUBMITTED, + status=JobStatusType.SUBMITTED, ) db.add(job) await db.commit() diff --git a/tests/v2/test_03_api/test_api_job.py b/tests/v2/test_03_api/test_api_job.py index 6704984167..5c0108bb9b 100644 --- a/tests/v2/test_03_api/test_api_job.py +++ b/tests/v2/test_03_api/test_api_job.py @@ -11,9 +11,9 @@ _workflow_insert_task, ) from fractal_server.app.schemas.v2 import ResourceType -from fractal_server.app.schemas.v2.dumps import DatasetDumpV2 -from fractal_server.app.schemas.v2.dumps import ProjectDumpV2 -from fractal_server.app.schemas.v2.dumps import WorkflowDumpV2 +from fractal_server.app.schemas.v2.dumps import DatasetDump +from fractal_server.app.schemas.v2.dumps import ProjectDump +from fractal_server.app.schemas.v2.dumps import WorkflowDump from fractal_server.app.schemas.v2.sharing import ProjectPermissions from fractal_server.runner.filenames import SHUTDOWN_FILENAME from fractal_server.runner.filenames import WORKFLOW_LOG_FILENAME @@ -412,13 +412,13 @@ async def test_project_apply_workflow_subset( f"?workflow_id={wf.id}&dataset_id={dataset1.id}", json=dict(first_task_index=0, last_task_index=1), ) - expected_project_dump = ProjectDumpV2( + expected_project_dump = ProjectDump( 
**json.loads(project.model_dump_json(exclude={"resource_id"})) ).model_dump() - expected_workflow_dump = WorkflowDumpV2( + expected_workflow_dump = WorkflowDump( **json.loads(wf.model_dump_json(exclude={"task_list"})) ).model_dump() - expected_dataset_dump = DatasetDumpV2( + expected_dataset_dump = DatasetDump( **json.loads( dataset1.model_dump_json(exclude={"history", "images"}) ) diff --git a/tests/v2/test_03_api/test_api_project.py b/tests/v2/test_03_api/test_api_project.py index 8bccbd4ade..221a87b28e 100644 --- a/tests/v2/test_03_api/test_api_project.py +++ b/tests/v2/test_03_api/test_api_project.py @@ -9,7 +9,7 @@ from fractal_server.app.routes.api.v2._aux_functions import ( _workflow_insert_task, ) -from fractal_server.app.schemas.v2 import JobStatusTypeV2 +from fractal_server.app.schemas.v2 import JobStatusType PREFIX = "/api/v2" @@ -258,7 +258,7 @@ async def test_delete_project( workflow_id=wf.id, working_dir=(tmp_path / "some_working_dir").as_posix(), dataset_id=dataset_id, - status=JobStatusTypeV2.DONE, + status=JobStatusType.DONE, ) # Check that a project-related job exists - via query @@ -315,7 +315,7 @@ async def test_delete_project_ongoing_jobs( ): async with MockCurrentUser() as user: - async def get_project_id_linked_to_job(status: JobStatusTypeV2) -> int: + async def get_project_id_linked_to_job(status: JobStatusType) -> int: p = await project_factory_v2(user) d = await dataset_factory_v2(project_id=p.id) w = await workflow_factory_v2(project_id=p.id) @@ -333,10 +333,10 @@ async def get_project_id_linked_to_job(status: JobStatusTypeV2) -> int: ) return p.id - prj_done = await get_project_id_linked_to_job(JobStatusTypeV2.DONE) - prj_failed = await get_project_id_linked_to_job(JobStatusTypeV2.FAILED) + prj_done = await get_project_id_linked_to_job(JobStatusType.DONE) + prj_failed = await get_project_id_linked_to_job(JobStatusType.FAILED) prj_submitted = await get_project_id_linked_to_job( - JobStatusTypeV2.SUBMITTED + JobStatusType.SUBMITTED ) res = await client.delete(f"api/v2/project/{prj_done}/") diff --git a/tests/v2/test_03_api/test_api_task.py b/tests/v2/test_03_api/test_api_task.py index 885d97d645..773b92bca4 100644 --- a/tests/v2/test_03_api/test_api_task.py +++ b/tests/v2/test_03_api/test_api_task.py @@ -6,8 +6,8 @@ from fractal_server.app.models import TaskGroupV2 from fractal_server.app.models import UserGroup -from fractal_server.app.schemas.v2 import TaskCreateV2 -from fractal_server.app.schemas.v2 import TaskUpdateV2 +from fractal_server.app.schemas.v2 import TaskCreate +from fractal_server.app.schemas.v2 import TaskUpdate PREFIX = "/api/v2/task" @@ -123,7 +123,7 @@ async def test_post_task( resource, profile = local_resource_profile_db async with MockCurrentUser(user_kwargs=dict(profile_id=profile.id)): # Successful task creations - task = TaskCreateV2( + task = TaskCreate( name="task_name", # Compound command_parallel="task_command_parallel", @@ -156,7 +156,7 @@ async def test_post_task( assert res.json()["authors"] == "Foo Bar + Fractal Team" assert res.json()["tags"] == ["compound", "test", "post", "api"] - task = TaskCreateV2( + task = TaskCreate( name="task_name", command_parallel="task_command_parallel", ) @@ -166,7 +166,7 @@ async def test_post_task( # TaskGroupV2 with same (pkg_name, version, user_id) assert res.status_code == 422 - task = TaskCreateV2( + task = TaskCreate( name="task_name2", # Parallel command_parallel="task_command_parallel", @@ -176,7 +176,7 @@ async def test_post_task( ) assert res.status_code == 201 assert res.json()["type"] == 
"parallel" - task = TaskCreateV2( + task = TaskCreate( name="task_name3", # Non Parallel command_non_parallel="task_command_non_parallel", @@ -312,7 +312,7 @@ async def test_patch_task_auth( user_kwargs=dict(profile_id=profile.id) ) as user_A: user_A_id = user_A.id - payload_obj = TaskCreateV2( + payload_obj = TaskCreate( name="a", category="my-cat", command_parallel="c" ) res = await client.post( @@ -323,7 +323,7 @@ async def test_patch_task_auth( # PATCH-task success as user_A -> success (task belongs to user) async with MockCurrentUser(user_kwargs=dict(id=user_A_id)) as user_A: - payload_obj = TaskUpdateV2(category="new-cat-1") + payload_obj = TaskUpdate(category="new-cat-1") res = await client.patch( f"{PREFIX}/{task_id}/", json=payload_obj.model_dump(exclude_unset=True), @@ -334,7 +334,7 @@ async def test_patch_task_auth( # PATCH-task failure as a different user -> failure (task belongs to user) async with MockCurrentUser(user_kwargs=dict(profile_id=profile.id)): # PATCH-task failure (task does not belong to user) - payload_obj = TaskUpdateV2(category="new-cat-2") + payload_obj = TaskUpdate(category="new-cat-2") res = await client.patch( f"{PREFIX}/{task_id}/", json=payload_obj.model_dump(exclude_unset=True), @@ -360,7 +360,7 @@ async def test_patch_task( ) task_compound = await task_factory_v2(user_id=user_A_id, index=3) # Test successuful patch of task_compound - update = TaskUpdateV2( + update = TaskUpdate( input_types={"input": True, "output": False}, output_types={"input": False, "output": True}, command_parallel="new_cmd_parallel", @@ -386,7 +386,7 @@ async def test_patch_task( async with MockCurrentUser(user_kwargs=dict(id=user_A_id)): # Fail on updating unsetted commands - update_non_parallel = TaskUpdateV2(command_non_parallel="xxx") + update_non_parallel = TaskUpdate(command_non_parallel="xxx") res_compound = await client.patch( f"{PREFIX}/{task_compound.id}/", json=update_non_parallel.model_dump(exclude_unset=True), @@ -403,7 +403,7 @@ async def test_patch_task( assert res_non_parallel.status_code == 200 assert res_parallel.status_code == 422 - update_parallel = TaskUpdateV2(command_parallel="yyy") + update_parallel = TaskUpdate(command_parallel="yyy") res_compound = await client.patch( f"{PREFIX}/{task_compound.id}/", json=update_non_parallel.model_dump(exclude_unset=True), diff --git a/tests/v2/test_03_api/test_api_task_group.py b/tests/v2/test_03_api/test_api_task_group.py index 5bc47bb5c1..d0ece63066 100644 --- a/tests/v2/test_03_api/test_api_task_group.py +++ b/tests/v2/test_03_api/test_api_task_group.py @@ -3,8 +3,8 @@ from fractal_server.app.models import LinkUserGroup from fractal_server.app.models import UserGroup from fractal_server.app.models.v2 import TaskGroupActivityV2 -from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2 -from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2 +from fractal_server.app.schemas.v2 import TaskGroupActivityAction +from fractal_server.app.schemas.v2 import TaskGroupActivityStatus PREFIX = "/api/v2/task-group" @@ -260,8 +260,8 @@ async def test_get_single_task_group_activity(client, MockCurrentUser, db): user_id=user.id, pkg_name="foo", version="1", - status=TaskGroupActivityStatusV2.OK, - action=TaskGroupActivityActionV2.COLLECT, + status=TaskGroupActivityStatus.OK, + action=TaskGroupActivityAction.COLLECT, ) db.add(activity) await db.commit() @@ -296,30 +296,30 @@ async def test_get_task_group_activity_list( user_id=user.id, pkg_name="foo", version="1", - status=TaskGroupActivityStatusV2.OK, - 
action=TaskGroupActivityActionV2.COLLECT, + status=TaskGroupActivityStatus.OK, + action=TaskGroupActivityAction.COLLECT, ) activity2 = TaskGroupActivityV2( user_id=user.id, pkg_name="bar", version="1", - status=TaskGroupActivityStatusV2.OK, - action=TaskGroupActivityActionV2.REACTIVATE, + status=TaskGroupActivityStatus.OK, + action=TaskGroupActivityAction.REACTIVATE, ) activity3 = TaskGroupActivityV2( user_id=user.id, pkg_name="foo", version="2", - status=TaskGroupActivityStatusV2.FAILED, - action=TaskGroupActivityActionV2.COLLECT, + status=TaskGroupActivityStatus.FAILED, + action=TaskGroupActivityAction.COLLECT, taskgroupv2_id=task.taskgroupv2_id, ) activity4 = TaskGroupActivityV2( user_id=user.id, pkg_name="foo", version="1", - status=TaskGroupActivityStatusV2.OK, - action=TaskGroupActivityActionV2.COLLECT, + status=TaskGroupActivityStatus.OK, + action=TaskGroupActivityAction.COLLECT, taskgroupv2_id=task.taskgroupv2_id, ) for activity in [activity1, activity2, activity3, activity4]: diff --git a/tests/v2/test_03_api/test_api_workflow.py b/tests/v2/test_03_api/test_api_workflow.py index 9717690385..6345c12669 100644 --- a/tests/v2/test_03_api/test_api_workflow.py +++ b/tests/v2/test_03_api/test_api_workflow.py @@ -9,7 +9,7 @@ from fractal_server.app.routes.api.v2._aux_functions import ( _workflow_insert_task, ) -from fractal_server.app.schemas.v2 import JobStatusTypeV2 +from fractal_server.app.schemas.v2 import JobStatusType PREFIX = "api/v2" @@ -164,17 +164,17 @@ async def test_delete_workflow( ) j1 = await job_factory_v2( workflow_id=wf_deletable_1.id, - status=JobStatusTypeV2.DONE, + status=JobStatusType.DONE, **payload, ) j2 = await job_factory_v2( workflow_id=wf_deletable_2.id, - status=JobStatusTypeV2.FAILED, + status=JobStatusType.FAILED, **payload, ) await job_factory_v2( workflow_id=wf_not_deletable_1.id, - status=JobStatusTypeV2.SUBMITTED, + status=JobStatusType.SUBMITTED, **payload, ) res = await client.delete( @@ -415,7 +415,7 @@ async def test_delete_workflow_with_job( workflow_id=workflow.id, dataset_id=dataset.id, working_dir=(tmp_path / "some_working_dir").as_posix(), - status=JobStatusTypeV2.DONE, + status=JobStatusType.DONE, ) assert job.workflow_id == workflow.id diff --git a/tests/v2/test_03_api/test_api_workflow_import_export.py b/tests/v2/test_03_api/test_api_workflow_import_export.py index d2a442aa81..33d2383e18 100644 --- a/tests/v2/test_03_api/test_api_workflow_import_export.py +++ b/tests/v2/test_03_api/test_api_workflow_import_export.py @@ -6,7 +6,7 @@ from fractal_server.app.models import TaskGroupV2 from fractal_server.app.models import TaskV2 from fractal_server.app.models import UserGroup -from fractal_server.app.schemas.v2 import TaskImportV2 +from fractal_server.app.schemas.v2 import TaskImport PREFIX = "api/v2" @@ -294,7 +294,7 @@ async def test_unit_get_task_by_taskimport(): # Test with matching version task_id = await _get_task_by_taskimport( - task_import=TaskImportV2(name="task", pkg_name="pkg", version="1.0.0"), + task_import=TaskImport(name="task", pkg_name="pkg", version="1.0.0"), user_id=1, task_groups_list=task_groups, default_group_id=1, @@ -304,7 +304,7 @@ async def test_unit_get_task_by_taskimport(): # Test with latest version task_id = await _get_task_by_taskimport( - task_import=TaskImportV2( + task_import=TaskImport( name="task", pkg_name="pkg", ), @@ -317,7 +317,7 @@ async def test_unit_get_task_by_taskimport(): # Test with latest version equal to None task_id = await _get_task_by_taskimport( - task_import=TaskImportV2( + 
task_import=TaskImport( name="task", pkg_name="pkg", ), @@ -330,7 +330,7 @@ async def test_unit_get_task_by_taskimport(): # Test with non-matching version task_id = await _get_task_by_taskimport( - task_import=TaskImportV2( + task_import=TaskImport( name="task", pkg_name="pkg", version="invalid", @@ -344,7 +344,7 @@ async def test_unit_get_task_by_taskimport(): # Test with non-matching pkg_name task_id = await _get_task_by_taskimport( - task_import=TaskImportV2( + task_import=TaskImport( name="task", pkg_name="invalid", ), @@ -357,7 +357,7 @@ async def test_unit_get_task_by_taskimport(): # Test with non-matching name task_id = await _get_task_by_taskimport( - task_import=TaskImportV2( + task_import=TaskImport( name="invalid", pkg_name="pkg", ), diff --git a/tests/v2/test_03_api/test_api_workflow_task.py b/tests/v2/test_03_api/test_api_workflow_task.py index f84b96289a..916f581bb1 100644 --- a/tests/v2/test_03_api/test_api_workflow_task.py +++ b/tests/v2/test_03_api/test_api_workflow_task.py @@ -9,7 +9,7 @@ from fractal_server.app.models.v2 import JobV2 from fractal_server.app.models.v2 import WorkflowTaskV2 from fractal_server.app.models.v2 import WorkflowV2 -from fractal_server.app.schemas.v2 import JobStatusTypeV2 +from fractal_server.app.schemas.v2 import JobStatusType PREFIX = "api/v2" @@ -279,7 +279,7 @@ async def test_delete_workflow_task( # Fail because of running Job running_job = JobV2( workflow_id=workflow["id"], - status=JobStatusTypeV2.SUBMITTED, + status=JobStatusType.SUBMITTED, user_email="foo@bar.com", dataset_dump={}, workflow_dump={}, @@ -827,7 +827,7 @@ async def test_reorder_task_list_fail( # Fail because of running Job running_job = JobV2( workflow_id=wf_id, - status=JobStatusTypeV2.SUBMITTED, + status=JobStatusType.SUBMITTED, user_email="foo@bar.com", dataset_dump={}, workflow_dump={}, @@ -838,7 +838,7 @@ async def test_reorder_task_list_fail( db.add(running_job) running_job2 = JobV2( workflow_id=wf_id, - status=JobStatusTypeV2.SUBMITTED, + status=JobStatusType.SUBMITTED, user_email="foo@bar.com", dataset_dump={}, workflow_dump={}, diff --git a/tests/v2/test_04_runner/test_unit_submit_workflow.py b/tests/v2/test_04_runner/test_unit_submit_workflow.py index d31e396819..d04cfa6b1b 100644 --- a/tests/v2/test_04_runner/test_unit_submit_workflow.py +++ b/tests/v2/test_04_runner/test_unit_submit_workflow.py @@ -6,7 +6,7 @@ from fractal_server.app.routes.api.v2._aux_functions import ( _workflow_insert_task, ) -from fractal_server.app.schemas.v2 import JobStatusTypeV2 +from fractal_server.app.schemas.v2 import JobStatusType from fractal_server.runner.v2.submit_workflow import submit_workflow @@ -47,7 +47,7 @@ async def test_fail_submit_workflows_wrong_IDs( workflow_id=workflow.id, working_dir=tmp_path.as_posix(), ) - assert job.status == JobStatusTypeV2.SUBMITTED + assert job.status == JobStatusType.SUBMITTED submit_workflow( workflow_id=9999999, dataset_id=9999999, @@ -58,7 +58,7 @@ async def test_fail_submit_workflows_wrong_IDs( user_cache_dir=tmp_path / "cache", ) await db.refresh(job) - assert job.status == JobStatusTypeV2.FAILED + assert job.status == JobStatusType.FAILED async def test_mkdir_error( diff --git a/tests/v2/test_06_tasks_lifecycle/test_api_task_collection.py b/tests/v2/test_06_tasks_lifecycle/test_api_task_collection.py index 5802d3afb0..76f55ff92d 100644 --- a/tests/v2/test_06_tasks_lifecycle/test_api_task_collection.py +++ b/tests/v2/test_06_tasks_lifecycle/test_api_task_collection.py @@ -10,8 +10,8 @@ from 
fractal_server.app.routes.api.v2._aux_functions_task_lifecycle import ( get_package_version_from_pypi, ) -from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2 -from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2 +from fractal_server.app.schemas.v2 import TaskGroupActivityAction +from fractal_server.app.schemas.v2 import TaskGroupActivityStatus PREFIX = "api/v2/task" @@ -317,8 +317,8 @@ async def test_contact_an_admin_message( task_group_activity_1 = TaskGroupActivityV2( user_id=userB.id, taskgroupv2_id=task_group.id, - action=TaskGroupActivityActionV2.COLLECT, - status=TaskGroupActivityStatusV2.PENDING, + action=TaskGroupActivityAction.COLLECT, + status=TaskGroupActivityStatus.PENDING, pkg_name="testing-tasks-mock", version="0.1.4", ) @@ -340,8 +340,8 @@ async def test_contact_an_admin_message( task_group_activity_2 = TaskGroupActivityV2( user_id=userB.id, taskgroupv2_id=task_group.id, - action=TaskGroupActivityActionV2.COLLECT, - status=TaskGroupActivityStatusV2.PENDING, + action=TaskGroupActivityAction.COLLECT, + status=TaskGroupActivityStatus.PENDING, pkg_name="testing-tasks-mock", version="0.1.4", ) diff --git a/tests/v2/test_06_tasks_lifecycle/test_api_task_collection_custom.py b/tests/v2/test_06_tasks_lifecycle/test_api_task_collection_custom.py index f407a98436..57bd0c0a1b 100644 --- a/tests/v2/test_06_tasks_lifecycle/test_api_task_collection_custom.py +++ b/tests/v2/test_06_tasks_lifecycle/test_api_task_collection_custom.py @@ -1,9 +1,9 @@ import json import sys -from fractal_server.app.schemas.v2 import ManifestV2 +from fractal_server.app.schemas.v2 import Manifest from fractal_server.app.schemas.v2 import ResourceType -from fractal_server.app.schemas.v2 import TaskCollectCustomV2 +from fractal_server.app.schemas.v2 import TaskCollectCustom PREFIX = "api/v2/task" @@ -25,7 +25,7 @@ async def test_task_collection_custom( async with MockCurrentUser( user_kwargs=dict(is_verified=True, profile_id=profile.id) ): - payload_name = TaskCollectCustomV2( + payload_name = TaskCollectCustom( manifest=manifest, python_interpreter=python_bin, label="label", @@ -67,7 +67,7 @@ async def test_task_collection_custom( # Success with package_root package_root = fractal_tasks_mock_collection["package_root"].as_posix() - payload_root = TaskCollectCustomV2( + payload_root = TaskCollectCustom( manifest=manifest, python_interpreter=python_bin, label="label3", @@ -136,8 +136,8 @@ async def test_task_collection_custom_fail_with_ssh( ): res = await client.post( f"{PREFIX}/collect/custom/", - json=TaskCollectCustomV2( - manifest=ManifestV2(**manifest_dict), + json=TaskCollectCustom( + manifest=Manifest(**manifest_dict), python_interpreter="/may/not/exist", label="label", package_root=None, diff --git a/tests/v2/test_06_tasks_lifecycle/test_api_task_lifecycle.py b/tests/v2/test_06_tasks_lifecycle/test_api_task_lifecycle.py index 0a77cf6f41..af0f65678f 100644 --- a/tests/v2/test_06_tasks_lifecycle/test_api_task_lifecycle.py +++ b/tests/v2/test_06_tasks_lifecycle/test_api_task_lifecycle.py @@ -11,10 +11,10 @@ from fractal_server.app.routes.api.v2._aux_functions import ( _workflow_insert_task, ) -from fractal_server.app.schemas.v2 import JobStatusTypeV2 +from fractal_server.app.schemas.v2 import JobStatusType from fractal_server.app.schemas.v2 import ResourceType -from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2 -from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2 +from fractal_server.app.schemas.v2 import TaskGroupActivityAction +from 
fractal_server.app.schemas.v2 import TaskGroupActivityStatus from fractal_server.config import get_settings from fractal_server.syringe import Inject @@ -100,8 +100,8 @@ async def test_deactivate_task_group_api( activity = res.json() assert res.status_code == 202 assert activity["version"] == "N/A" - assert activity["status"] == TaskGroupActivityStatusV2.OK - assert activity["action"] == TaskGroupActivityActionV2.DEACTIVATE + assert activity["status"] == TaskGroupActivityStatus.OK + assert activity["action"] == TaskGroupActivityAction.DEACTIVATE assert activity["timestamp_started"] is not None assert activity["timestamp_ended"] is not None task_group_other = await db.get(TaskGroupV2, task_other.taskgroupv2_id) @@ -114,8 +114,8 @@ async def test_deactivate_task_group_api( activity = res.json() assert res.status_code == 202 activity_id = activity["id"] - assert activity["status"] == TaskGroupActivityStatusV2.PENDING - assert activity["action"] == TaskGroupActivityActionV2.DEACTIVATE + assert activity["status"] == TaskGroupActivityStatus.PENDING + assert activity["action"] == TaskGroupActivityAction.DEACTIVATE assert activity["timestamp_started"] is not None assert activity["timestamp_ended"] is None task_group_pypi = await db.get(TaskGroupV2, task_pypi.taskgroupv2_id) @@ -200,8 +200,8 @@ async def test_reactivate_task_group_api( activity = res.json() assert res.status_code == 202 assert activity["version"] == "N/A" - assert activity["status"] == TaskGroupActivityStatusV2.OK - assert activity["action"] == TaskGroupActivityActionV2.REACTIVATE + assert activity["status"] == TaskGroupActivityStatus.OK + assert activity["action"] == TaskGroupActivityAction.REACTIVATE assert activity["timestamp_started"] is not None assert activity["timestamp_ended"] is not None task_group_other = await db.get(TaskGroupV2, task_other.taskgroupv2_id) @@ -229,8 +229,8 @@ async def test_reactivate_task_group_api( activity_id = activity["id"] assert res.status_code == 202 assert activity["version"] == task_group_pypi.version - assert activity["status"] == TaskGroupActivityStatusV2.PENDING - assert activity["action"] == TaskGroupActivityActionV2.REACTIVATE + assert activity["status"] == TaskGroupActivityStatus.PENDING + assert activity["action"] == TaskGroupActivityAction.REACTIVATE assert activity["timestamp_started"] is not None assert activity["timestamp_ended"] is None await db.refresh(task_group_pypi) @@ -380,7 +380,7 @@ async def _aux_test_lifecycle( f"(activity ID={task_group_activity_collection['id']}) " "for this task group " f"(ID={task_group_activity_collection['taskgroupv2_id']}), " - f"with status '{TaskGroupActivityStatusV2.OK}'." + f"with status '{TaskGroupActivityStatus.OK}'." 
     )

     task_group_path = Path(task_group.path)
@@ -390,15 +390,15 @@ async def _aux_test_lifecycle(
     debug(res.json())
     assert res.status_code == 202
     activity = res.json()
-    assert activity["action"] == TaskGroupActivityActionV2.DELETE
-    assert activity["status"] == TaskGroupActivityStatusV2.PENDING
+    assert activity["action"] == TaskGroupActivityAction.DELETE
+    assert activity["status"] == TaskGroupActivityStatus.PENDING

     # `task_group.path` does not exist anymore
     assert not Path(task_group.path).exists()

     res = await client.get(f"api/v2/task-group/activity/{activity['id']}/")
     activity = res.json()
-    assert activity["action"] == TaskGroupActivityActionV2.DELETE
-    assert activity["status"] == TaskGroupActivityStatusV2.OK
+    assert activity["action"] == TaskGroupActivityAction.DELETE
+    assert activity["status"] == TaskGroupActivityStatus.OK

     # We call the collect endpoint again, mocking the background tasks
     # (for speeding up the test)
@@ -419,12 +419,12 @@ def dummy_collect(*args, **kwargs):
     res = await client.post(f"api/v2/task-group/{task_group_id}/delete/")
     assert res.status_code == 202
     activity = res.json()
-    assert activity["action"] == TaskGroupActivityActionV2.DELETE
-    assert activity["status"] == TaskGroupActivityStatusV2.PENDING
+    assert activity["action"] == TaskGroupActivityAction.DELETE
+    assert activity["status"] == TaskGroupActivityStatus.PENDING
     res = await client.get(f"api/v2/task-group/activity/{activity['id']}/")
     activity = res.json()
-    assert activity["action"] == TaskGroupActivityActionV2.DELETE
-    assert activity["status"] == TaskGroupActivityStatusV2.FAILED
+    assert activity["action"] == TaskGroupActivityAction.DELETE
+    assert activity["status"] == TaskGroupActivityStatus.FAILED
     assert "No such file or directory" in activity["log"]


@@ -500,8 +500,8 @@ async def test_fail_due_to_ongoing_activities(
     activity = TaskGroupActivityV2(
         user_id=user.id,
         taskgroupv2_id=task_group.id,
-        action=TaskGroupActivityActionV2.DEACTIVATE,
-        status=TaskGroupActivityStatusV2.ONGOING,
+        action=TaskGroupActivityAction.DEACTIVATE,
+        status=TaskGroupActivityStatus.ONGOING,
         pkg_name="dummy",
         version="dummy",
     )
@@ -570,7 +570,7 @@ async def test_lifecycle_actions_with_submitted_jobs(
         dataset_dump={},
         workflow_dump={},
         project_dump={},
-        status=JobStatusTypeV2.SUBMITTED,
+        status=JobStatusType.SUBMITTED,
         first_task_index=0,
         last_task_index=1,
     )
diff --git a/tests/v2/test_06_tasks_lifecycle/test_collect_local.py b/tests/v2/test_06_tasks_lifecycle/test_collect_local.py
index 55bdfb8954..344a0284f1 100644
--- a/tests/v2/test_06_tasks_lifecycle/test_collect_local.py
+++ b/tests/v2/test_06_tasks_lifecycle/test_collect_local.py
@@ -5,8 +5,8 @@
 from fractal_server.app.models.v2 import TaskGroupActivityV2
 from fractal_server.app.models.v2 import TaskGroupV2
 from fractal_server.app.schemas.v2 import FractalUploadedFile
-from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
-from fractal_server.app.schemas.v2.task_group import TaskGroupActivityActionV2
+from fractal_server.app.schemas.v2 import TaskGroupActivityStatus
+from fractal_server.app.schemas.v2.task_group import TaskGroupActivityAction
 from fractal_server.tasks.v2.local import collect_local


@@ -35,8 +35,8 @@ async def test_collect_pip_existing_folder(
     task_group_activity = TaskGroupActivityV2(
         user_id=first_user.id,
         taskgroupv2_id=task_group.id,
-        status=TaskGroupActivityStatusV2.PENDING,
-        action=TaskGroupActivityActionV2.COLLECT,
+        status=TaskGroupActivityStatus.PENDING,
+        action=TaskGroupActivityAction.COLLECT,
         pkg_name="pkg",
version="1.2.3", ) @@ -104,8 +104,8 @@ def patched_function(*args, **kwargs): task_group_activity = TaskGroupActivityV2( user_id=first_user.id, taskgroupv2_id=task_group.id, - status=TaskGroupActivityStatusV2.PENDING, - action=TaskGroupActivityActionV2.COLLECT, + status=TaskGroupActivityStatus.PENDING, + action=TaskGroupActivityAction.COLLECT, pkg_name="pkg", version="0.0.1", ) @@ -190,8 +190,8 @@ async def test_invalid_wheel( task_group_activity = TaskGroupActivityV2( user_id=user.id, taskgroupv2_id=task_group.id, - status=TaskGroupActivityStatusV2.PENDING, - action=TaskGroupActivityActionV2.COLLECT, + status=TaskGroupActivityStatus.PENDING, + action=TaskGroupActivityAction.COLLECT, pkg_name="pkg", version="1.0.0", ) @@ -217,7 +217,7 @@ async def test_invalid_wheel( TaskGroupActivityV2, task_group_activity.id ) assert task_group_activity.status == ( - TaskGroupActivityStatusV2.FAILED + TaskGroupActivityStatus.FAILED ) assert task_group_activity.timestamp_ended is not None assert log in task_group_activity.log diff --git a/tests/v2/test_06_tasks_lifecycle/test_collect_pixi_local.py b/tests/v2/test_06_tasks_lifecycle/test_collect_pixi_local.py index b81830af19..31504d2541 100644 --- a/tests/v2/test_06_tasks_lifecycle/test_collect_pixi_local.py +++ b/tests/v2/test_06_tasks_lifecycle/test_collect_pixi_local.py @@ -5,8 +5,8 @@ from fractal_server.app.models.v2 import TaskGroupActivityV2 from fractal_server.app.models.v2 import TaskGroupV2 from fractal_server.app.schemas.v2 import FractalUploadedFile -from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2 -from fractal_server.app.schemas.v2.task_group import TaskGroupActivityActionV2 +from fractal_server.app.schemas.v2 import TaskGroupActivityStatus +from fractal_server.app.schemas.v2.task_group import TaskGroupActivityAction from fractal_server.tasks.v2.local import collect_local_pixi @@ -44,8 +44,8 @@ async def test_collect_local_pixi_path_exists( task_group_activity = TaskGroupActivityV2( user_id=first_user.id, taskgroupv2_id=task_group.id, - status=TaskGroupActivityStatusV2.PENDING, - action=TaskGroupActivityActionV2.COLLECT, + status=TaskGroupActivityStatus.PENDING, + action=TaskGroupActivityAction.COLLECT, pkg_name=task_group.pkg_name, version=task_group.version, ) diff --git a/tests/v2/test_06_tasks_lifecycle/test_deactivate_local.py b/tests/v2/test_06_tasks_lifecycle/test_deactivate_local.py index d5cc012d44..fe8216fbb9 100644 --- a/tests/v2/test_06_tasks_lifecycle/test_deactivate_local.py +++ b/tests/v2/test_06_tasks_lifecycle/test_deactivate_local.py @@ -4,9 +4,9 @@ from fractal_server.app.models.v2 import TaskGroupActivityV2 from fractal_server.app.models.v2 import TaskGroupV2 -from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2 -from fractal_server.app.schemas.v2 import TaskGroupV2OriginEnum -from fractal_server.app.schemas.v2.task_group import TaskGroupActivityActionV2 +from fractal_server.app.schemas.v2 import TaskGroupActivityStatus +from fractal_server.app.schemas.v2 import TaskGroupOriginEnum +from fractal_server.app.schemas.v2.task_group import TaskGroupActivityAction from fractal_server.tasks.v2.local import collect_local from fractal_server.tasks.v2.local import deactivate_local from fractal_server.utils import execute_command_sync @@ -33,8 +33,8 @@ async def test_deactivate_fail_no_venv_path( task_group_activity = TaskGroupActivityV2( user_id=first_user.id, taskgroupv2_id=task_group.id, - status=TaskGroupActivityStatusV2.PENDING, - action=TaskGroupActivityActionV2.DEACTIVATE, + 
status=TaskGroupActivityStatus.PENDING, + action=TaskGroupActivityAction.DEACTIVATE, pkg_name="pkg", version="1.0.0", ) @@ -94,8 +94,8 @@ def fail_function(*args, **kwargs): task_group_activity = TaskGroupActivityV2( user_id=first_user.id, taskgroupv2_id=task_group.id, - status=TaskGroupActivityStatusV2.PENDING, - action=TaskGroupActivityActionV2.DEACTIVATE, + status=TaskGroupActivityStatus.PENDING, + action=TaskGroupActivityAction.DEACTIVATE, pkg_name="pkg", version="1.0.0", ) @@ -130,7 +130,7 @@ async def test_deactivate_wheel_no_archive_path( task_group = TaskGroupV2( pkg_name="pkg", version="1.2.3", - origin=TaskGroupV2OriginEnum.WHEELFILE, + origin=TaskGroupOriginEnum.WHEELFILE, archive_path="/invalid", path=path.as_posix(), venv_path=(path / "venv").as_posix(), @@ -145,8 +145,8 @@ async def test_deactivate_wheel_no_archive_path( task_group_activity = TaskGroupActivityV2( user_id=first_user.id, taskgroupv2_id=task_group.id, - status=TaskGroupActivityStatusV2.PENDING, - action=TaskGroupActivityActionV2.DEACTIVATE, + status=TaskGroupActivityStatus.PENDING, + action=TaskGroupActivityAction.DEACTIVATE, pkg_name="pkg", version="1.0.0", ) @@ -197,7 +197,7 @@ async def test_deactivate_wheel_package_created_before_2_9_0( task_group = TaskGroupV2( pkg_name="fractal_tasks_mock", version="0.0.1", - origin=TaskGroupV2OriginEnum.WHEELFILE, + origin=TaskGroupOriginEnum.WHEELFILE, archive_path=archive_path, path=path.as_posix(), venv_path=venv_path.as_posix(), @@ -212,8 +212,8 @@ async def test_deactivate_wheel_package_created_before_2_9_0( activity_collect = TaskGroupActivityV2( user_id=first_user.id, taskgroupv2_id=task_group.id, - status=TaskGroupActivityStatusV2.PENDING, - action=TaskGroupActivityActionV2.COLLECT, + status=TaskGroupActivityStatus.PENDING, + action=TaskGroupActivityAction.COLLECT, pkg_name=task_group.pkg_name, version=task_group.version, ) @@ -230,7 +230,7 @@ async def test_deactivate_wheel_package_created_before_2_9_0( profile=profile, ) activity_collect = await db.get(TaskGroupActivityV2, activity_collect.id) - assert activity_collect.status == TaskGroupActivityStatusV2.OK + assert activity_collect.status == TaskGroupActivityStatus.OK # STEP 2: make it look like a pre-2.9.0 package, both in the db and # in the virtual environment @@ -251,8 +251,8 @@ async def test_deactivate_wheel_package_created_before_2_9_0( activity_deactivate = TaskGroupActivityV2( user_id=first_user.id, taskgroupv2_id=task_group.id, - status=TaskGroupActivityStatusV2.PENDING, - action=TaskGroupActivityActionV2.DEACTIVATE, + status=TaskGroupActivityStatus.PENDING, + action=TaskGroupActivityAction.DEACTIVATE, pkg_name=task_group.pkg_name, version=task_group.version, ) @@ -273,7 +273,7 @@ async def test_deactivate_wheel_package_created_before_2_9_0( TaskGroupActivityV2, activity_deactivate.id ) task_group = await db.get(TaskGroupV2, task_group.id) - assert activity_deactivate.status == TaskGroupActivityStatusV2.OK + assert activity_deactivate.status == TaskGroupActivityStatus.OK print(activity_deactivate.log) assert "Recreate pip-freeze information" in activity_deactivate.log @@ -309,8 +309,8 @@ async def test_deactivate_local_github_dependency( task_group_activity = TaskGroupActivityV2( user_id=first_user.id, taskgroupv2_id=task_group.id, - status=TaskGroupActivityStatusV2.PENDING, - action=TaskGroupActivityActionV2.DEACTIVATE, + status=TaskGroupActivityStatus.PENDING, + action=TaskGroupActivityAction.DEACTIVATE, pkg_name="pkg", version="1.0.0", ) diff --git 
a/tests/v2/test_06_tasks_lifecycle/test_deactivate_ssh.py b/tests/v2/test_06_tasks_lifecycle/test_deactivate_ssh.py index f423f13093..d4c399fafe 100644 --- a/tests/v2/test_06_tasks_lifecycle/test_deactivate_ssh.py +++ b/tests/v2/test_06_tasks_lifecycle/test_deactivate_ssh.py @@ -6,9 +6,9 @@ from fractal_server.app.models.v2 import TaskGroupActivityV2 from fractal_server.app.models.v2 import TaskGroupV2 from fractal_server.app.schemas.v2 import FractalUploadedFile -from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2 -from fractal_server.app.schemas.v2 import TaskGroupV2OriginEnum -from fractal_server.app.schemas.v2.task_group import TaskGroupActivityActionV2 +from fractal_server.app.schemas.v2 import TaskGroupActivityStatus +from fractal_server.app.schemas.v2 import TaskGroupOriginEnum +from fractal_server.app.schemas.v2.task_group import TaskGroupActivityAction from fractal_server.ssh._fabric import FractalSSH from fractal_server.tasks.v2.ssh import collect_ssh from fractal_server.tasks.v2.ssh import deactivate_ssh @@ -52,8 +52,8 @@ async def test_deactivate_fail_no_venv_path( task_group_activity = TaskGroupActivityV2( user_id=first_user.id, taskgroupv2_id=task_group.id, - status=TaskGroupActivityStatusV2.PENDING, - action=TaskGroupActivityActionV2.DEACTIVATE, + status=TaskGroupActivityStatus.PENDING, + action=TaskGroupActivityAction.DEACTIVATE, pkg_name=task_group.pkg_name, version=task_group.version, ) @@ -125,8 +125,8 @@ def fail_function(*args, **kwargs): task_group_activity = TaskGroupActivityV2( user_id=first_user.id, taskgroupv2_id=task_group.id, - status=TaskGroupActivityStatusV2.PENDING, - action=TaskGroupActivityActionV2.DEACTIVATE, + status=TaskGroupActivityStatus.PENDING, + action=TaskGroupActivityAction.DEACTIVATE, pkg_name=task_group.pkg_name, version=task_group.version, ) @@ -170,7 +170,7 @@ async def test_deactivate_wheel_no_archive_path( task_group = TaskGroupV2( pkg_name="pkg", version="1.2.3", - origin=TaskGroupV2OriginEnum.WHEELFILE, + origin=TaskGroupOriginEnum.WHEELFILE, archive_path="/invalid", path=path.as_posix(), venv_path=(path / "venv").as_posix(), @@ -185,8 +185,8 @@ async def test_deactivate_wheel_no_archive_path( task_group_activity = TaskGroupActivityV2( user_id=first_user.id, taskgroupv2_id=task_group.id, - status=TaskGroupActivityStatusV2.PENDING, - action=TaskGroupActivityActionV2.DEACTIVATE, + status=TaskGroupActivityStatus.PENDING, + action=TaskGroupActivityAction.DEACTIVATE, pkg_name=task_group.pkg_name, version=task_group.version, ) @@ -249,7 +249,7 @@ async def test_deactivate_wheel_package_created_before_2_9_0( task_group = TaskGroupV2( pkg_name="fractal_tasks_mock", version="0.0.1", - origin=TaskGroupV2OriginEnum.WHEELFILE, + origin=TaskGroupOriginEnum.WHEELFILE, archive_path=archive_path, path=path.as_posix(), venv_path=venv_path.as_posix(), @@ -264,8 +264,8 @@ async def test_deactivate_wheel_package_created_before_2_9_0( activity_collect = TaskGroupActivityV2( user_id=first_user.id, taskgroupv2_id=task_group.id, - status=TaskGroupActivityStatusV2.PENDING, - action=TaskGroupActivityActionV2.COLLECT, + status=TaskGroupActivityStatus.PENDING, + action=TaskGroupActivityAction.COLLECT, pkg_name=task_group.pkg_name, version=task_group.version, ) @@ -285,7 +285,7 @@ async def test_deactivate_wheel_package_created_before_2_9_0( profile=profile, ) activity_collect = await db.get(TaskGroupActivityV2, activity_collect.id) - assert activity_collect.status == TaskGroupActivityStatusV2.OK + assert activity_collect.status == 
TaskGroupActivityStatus.OK # STEP 2: make it look like a pre-2.9.0 package, both in the db and # in the virtual environment @@ -306,8 +306,8 @@ async def test_deactivate_wheel_package_created_before_2_9_0( activity_deactivate = TaskGroupActivityV2( user_id=first_user.id, taskgroupv2_id=task_group.id, - status=TaskGroupActivityStatusV2.PENDING, - action=TaskGroupActivityActionV2.DEACTIVATE, + status=TaskGroupActivityStatus.PENDING, + action=TaskGroupActivityAction.DEACTIVATE, pkg_name=task_group.pkg_name, version=task_group.version, ) @@ -328,7 +328,7 @@ async def test_deactivate_wheel_package_created_before_2_9_0( TaskGroupActivityV2, activity_deactivate.id ) task_group = await db.get(TaskGroupV2, task_group.id) - assert activity_deactivate.status == TaskGroupActivityStatusV2.OK + assert activity_deactivate.status == TaskGroupActivityStatus.OK print(activity_deactivate.log) assert "Recreate pip-freeze information" in activity_deactivate.log @@ -371,8 +371,8 @@ async def test_deactivate_ssh_github_dependency( task_group_activity = TaskGroupActivityV2( user_id=first_user.id, taskgroupv2_id=task_group.id, - status=TaskGroupActivityStatusV2.PENDING, - action=TaskGroupActivityActionV2.DEACTIVATE, + status=TaskGroupActivityStatus.PENDING, + action=TaskGroupActivityAction.DEACTIVATE, pkg_name=task_group.pkg_name, version=task_group.version, ) diff --git a/tests/v2/test_06_tasks_lifecycle/test_reactivate_local.py b/tests/v2/test_06_tasks_lifecycle/test_reactivate_local.py index ffdc54a492..0da786ebf1 100644 --- a/tests/v2/test_06_tasks_lifecycle/test_reactivate_local.py +++ b/tests/v2/test_06_tasks_lifecycle/test_reactivate_local.py @@ -6,8 +6,8 @@ from fractal_server.app.models.v2 import TaskGroupActivityV2 from fractal_server.app.models.v2 import TaskGroupV2 -from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2 -from fractal_server.app.schemas.v2.task_group import TaskGroupActivityActionV2 +from fractal_server.app.schemas.v2 import TaskGroupActivityStatus +from fractal_server.app.schemas.v2.task_group import TaskGroupActivityAction from fractal_server.tasks.v2.local import reactivate_local @@ -33,8 +33,8 @@ async def test_reactivate_local_venv_exists( task_group_activity = TaskGroupActivityV2( user_id=first_user.id, taskgroupv2_id=task_group.id, - status=TaskGroupActivityStatusV2.PENDING, - action=TaskGroupActivityActionV2.DEACTIVATE, + status=TaskGroupActivityStatus.PENDING, + action=TaskGroupActivityAction.DEACTIVATE, pkg_name="pkg", version="1.0.0", ) @@ -114,8 +114,8 @@ def patched_rmtree(*args, **kwargs): task_group_activity = TaskGroupActivityV2( user_id=first_user.id, taskgroupv2_id=task_group.id, - status=TaskGroupActivityStatusV2.PENDING, - action=TaskGroupActivityActionV2.REACTIVATE, + status=TaskGroupActivityStatus.PENDING, + action=TaskGroupActivityAction.REACTIVATE, pkg_name=task_group.pkg_name, version=task_group.version, ) diff --git a/tests/v2/test_06_tasks_lifecycle/test_reactivate_ssh.py b/tests/v2/test_06_tasks_lifecycle/test_reactivate_ssh.py index d15834fc7c..32a485c662 100644 --- a/tests/v2/test_06_tasks_lifecycle/test_reactivate_ssh.py +++ b/tests/v2/test_06_tasks_lifecycle/test_reactivate_ssh.py @@ -5,8 +5,8 @@ from fractal_server.app.models.v2 import TaskGroupActivityV2 from fractal_server.app.models.v2 import TaskGroupV2 -from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2 -from fractal_server.app.schemas.v2.task_group import TaskGroupActivityActionV2 +from fractal_server.app.schemas.v2 import TaskGroupActivityStatus +from 
fractal_server.app.schemas.v2.task_group import TaskGroupActivityAction
 from fractal_server.ssh._fabric import FractalSSH
 from fractal_server.tasks.v2.ssh import reactivate_ssh
@@ -49,8 +49,8 @@ async def test_reactivate_ssh_venv_exists(
     task_group_activity = TaskGroupActivityV2(
         user_id=first_user.id,
         taskgroupv2_id=task_group.id,
-        status=TaskGroupActivityStatusV2.PENDING,
-        action=TaskGroupActivityActionV2.DEACTIVATE,
+        status=TaskGroupActivityStatus.PENDING,
+        action=TaskGroupActivityAction.DEACTIVATE,
         pkg_name=task_group.pkg_name,
         version=task_group.version,
     )
@@ -139,8 +139,8 @@ def patched_rmtree(*args, **kwargs):
     task_group_activity = TaskGroupActivityV2(
         user_id=first_user.id,
         taskgroupv2_id=task_group.id,
-        status=TaskGroupActivityStatusV2.PENDING,
-        action=TaskGroupActivityActionV2.REACTIVATE,
+        status=TaskGroupActivityStatus.PENDING,
+        action=TaskGroupActivityAction.REACTIVATE,
         pkg_name=task_group.pkg_name,
         version=task_group.version,
     )
diff --git a/tests/v2/test_06_tasks_lifecycle/test_unit_aux_functions_task_lifecycle.py b/tests/v2/test_06_tasks_lifecycle/test_unit_aux_functions_task_lifecycle.py
index 2f69542670..929e899467 100644
--- a/tests/v2/test_06_tasks_lifecycle/test_unit_aux_functions_task_lifecycle.py
+++ b/tests/v2/test_06_tasks_lifecycle/test_unit_aux_functions_task_lifecycle.py
@@ -6,7 +6,7 @@
 from fractal_server.app.routes.api.v2._aux_functions_task_lifecycle import (
     check_no_related_workflowtask,
 )
-from fractal_server.app.schemas.v2 import TaskGroupV2OriginEnum
+from fractal_server.app.schemas.v2 import TaskGroupOriginEnum


 async def test_check_no_related_workflowtask(
@@ -26,7 +26,7 @@ async def test_check_no_related_workflowtask(
     )
     task_group = TaskGroupV2(
         user_id=user.id,
-        origin=TaskGroupV2OriginEnum.OTHER,
+        origin=TaskGroupOriginEnum.OTHER,
         pkg_name="pkg",
         task_list=[task1, task2],
         resource_id=resource.id,

From 1ae52320f0ef24c8a25c8f944c446e1ff0078e3f Mon Sep 17 00:00:00 2001
From: Yuri Chiucconi
Date: Fri, 28 Nov 2025 15:49:44 +0100
Subject: [PATCH 02/13] rename remaining non-db classes to drop V2 from their
 names

---
 fractal_server/app/routes/admin/v2/task.py         | 14 +++++++-------
 .../unit/test_unit_lifecycle_utils.py              | 10 +++++-----
 tests/v2/test_08_backends/test_local_config.py     |  4 ++--
 tests/v2/test_08_backends/test_slurm_config.py     | 12 ++++++------
 .../test_09_history/test_unit_api_aux_functions.py |  8 ++++----
 5 files changed, 24 insertions(+), 24 deletions(-)

diff --git a/fractal_server/app/routes/admin/v2/task.py b/fractal_server/app/routes/admin/v2/task.py
index d96717218d..c96826f233 100644
--- a/fractal_server/app/routes/admin/v2/task.py
+++ b/fractal_server/app/routes/admin/v2/task.py
@@ -23,7 +23,7 @@
 router = APIRouter()


-class TaskV2Minimal(BaseModel):
+class TaskMinimal(BaseModel):
     id: int
     name: str
     type: str
@@ -39,7 +39,7 @@ class ProjectUser(BaseModel):
     email: EmailStr


-class TaskV2Relationship(BaseModel):
+class TaskRelationship(BaseModel):
     workflow_id: int
     workflow_name: str
     project_id: int
@@ -47,12 +47,12 @@ class TaskV2Relationship(BaseModel):
     project_users: list[ProjectUser] = Field(default_factory=list)


-class TaskV2Info(BaseModel):
-    task: TaskV2Minimal
-    relationships: list[TaskV2Relationship]
+class TaskInfo(BaseModel):
+    task: TaskMinimal
+    relationships: list[TaskRelationship]


-@router.get("/", response_model=PaginationResponse[TaskV2Info])
+@router.get("/", response_model=PaginationResponse[TaskInfo])
 async def query_tasks(
     id: int | None = None,
     source: str | None = None,
@@ -66,7 +66,7 @@ async def query_tasks(
     pagination: 
PaginationRequest = Depends(get_pagination_params), user: UserOAuth = Depends(current_superuser_act), db: AsyncSession = Depends(get_async_db), -) -> PaginationResponse[TaskV2Info]: +) -> PaginationResponse[TaskInfo]: """ Query `TaskV2` and get information about related workflows and projects. """ diff --git a/tests/v2/test_06_tasks_lifecycle/unit/test_unit_lifecycle_utils.py b/tests/v2/test_06_tasks_lifecycle/unit/test_unit_lifecycle_utils.py index ab22b94b53..068ea0641e 100644 --- a/tests/v2/test_06_tasks_lifecycle/unit/test_unit_lifecycle_utils.py +++ b/tests/v2/test_06_tasks_lifecycle/unit/test_unit_lifecycle_utils.py @@ -4,7 +4,7 @@ from fractal_server.tasks.v2.local._utils import check_task_files_exist -class _MockTaskCreateV2(BaseModel): +class _MockTaskCreate(BaseModel): name: str = "task_name" command_non_parallel: str | None = None command_parallel: str | None = None @@ -19,20 +19,20 @@ def test_check_task_files_exist(tmp_path): # Success check_task_files_exist( task_list=[ - _MockTaskCreateV2(command_non_parallel=f"py {existing_path}"), - _MockTaskCreateV2(command_parallel=f"py {existing_path}"), + _MockTaskCreate(command_non_parallel=f"py {existing_path}"), + _MockTaskCreate(command_parallel=f"py {existing_path}"), ] ) # Failures with pytest.raises(FileNotFoundError) as e: check_task_files_exist( task_list=[ - _MockTaskCreateV2(command_non_parallel=f"py {missing_path}") + _MockTaskCreate(command_non_parallel=f"py {missing_path}") ] ) assert "missing file" in str(e.value) with pytest.raises(FileNotFoundError) as e: check_task_files_exist( - task_list=[_MockTaskCreateV2(command_parallel=f"py {missing_path}")] + task_list=[_MockTaskCreate(command_parallel=f"py {missing_path}")] ) assert "missing file" in str(e.value) diff --git a/tests/v2/test_08_backends/test_local_config.py b/tests/v2/test_08_backends/test_local_config.py index 6b3a02fc39..a9ca25afb8 100644 --- a/tests/v2/test_08_backends/test_local_config.py +++ b/tests/v2/test_08_backends/test_local_config.py @@ -9,11 +9,11 @@ def test_get_local_backend_config(): - class WorkflowTaskV2(object): + class WorkflowTask(object): meta_parallel: dict[str, str] = {"parallel_tasks_per_job": 11} meta_non_parallel: dict[str, Any] = {"parallel_tasks_per_job": 22} - wftask = WorkflowTaskV2() + wftask = WorkflowTask() shared_config = JobRunnerConfigLocal() out = get_local_backend_config( diff --git a/tests/v2/test_08_backends/test_slurm_config.py b/tests/v2/test_08_backends/test_slurm_config.py index 7b2fdaa9db..de0336b0bf 100644 --- a/tests/v2/test_08_backends/test_slurm_config.py +++ b/tests/v2/test_08_backends/test_slurm_config.py @@ -12,12 +12,12 @@ ) -class MockTaskV2(BaseModel): +class MockTask(BaseModel): name: str = "task-name" -class WorkflowTaskV2Mock(BaseModel): - task: MockTaskV2 = Field(default_factory=MockTaskV2) +class WorkflowTaskMock(BaseModel): + task: MockTask = Field(default_factory=MockTask) meta_parallel: dict[str, Any] | None = Field(None) meta_non_parallel: dict[str, Any] | None = Field(None) @@ -78,7 +78,7 @@ def test_get_slurm_config_internal(): constraint=CUSTOM_CONSTRAINT, extra_lines=CUSTOM_EXTRA_LINES, ) - mywftask = WorkflowTaskV2Mock(meta_non_parallel=meta_non_parallel) + mywftask = WorkflowTaskMock(meta_non_parallel=meta_non_parallel) # Call get_slurm_config_internal slurm_config = _get_slurm_config_internal( @@ -145,7 +145,7 @@ def test_get_slurm_config_internal_gpu_options(): assert shared_slurm_config.default_slurm_config.extra_lines == [] # In absence of `needs_gpu`, parameters in `gpu_slurm_config` are 
not used - mywftask = WorkflowTaskV2Mock() + mywftask = WorkflowTaskMock() slurm_config = _get_slurm_config_internal( shared_config=shared_slurm_config, wftask=mywftask, @@ -155,7 +155,7 @@ def test_get_slurm_config_internal_gpu_options(): assert slurm_config.gpus is None # When `needs_gpu` is set, parameters in `gpu_slurm_config` are used - mywftask = WorkflowTaskV2Mock(meta_non_parallel=dict(needs_gpu=True)) + mywftask = WorkflowTaskMock(meta_non_parallel=dict(needs_gpu=True)) slurm_config = _get_slurm_config_internal( shared_config=shared_slurm_config, wftask=mywftask, diff --git a/tests/v2/test_09_history/test_unit_api_aux_functions.py b/tests/v2/test_09_history/test_unit_api_aux_functions.py index a975de9454..3b7e224d05 100644 --- a/tests/v2/test_09_history/test_unit_api_aux_functions.py +++ b/tests/v2/test_09_history/test_unit_api_aux_functions.py @@ -30,13 +30,13 @@ async def test_get_history_unit_or_404(db): def test_read_log_file(tmp_path: Path): - class MockTaskV2(BaseModel): + class MockTask(BaseModel): name: str = "task-name" - class MockWorkflowTaskV2(BaseModel): - task: MockTaskV2 + class MockWorkflowTask(BaseModel): + task: MockTask - wftask = MockWorkflowTaskV2(task=MockTaskV2()) + wftask = MockWorkflowTask(task=MockTask()) logfile = (tmp_path / "logs.txt").as_posix() From ac70ea6ac9cbaa85691b63e4496f4e921b1a2331 Mon Sep 17 00:00:00 2001 From: Yuri Chiucconi Date: Fri, 28 Nov 2025 15:51:58 +0100 Subject: [PATCH 03/13] rollback Manifest and TaskManifest to V2 --- fractal_server/app/schemas/v2/__init__.py | 4 +- fractal_server/app/schemas/v2/manifest.py | 6 +- .../app/schemas/v2/task_collection.py | 4 +- .../json_schemas/generate_manifest_v2.py | 2 +- fractal_server/tasks/v2/local/collect.py | 4 +- fractal_server/tasks/v2/local/collect_pixi.py | 4 +- fractal_server/tasks/v2/ssh/collect.py | 4 +- fractal_server/tasks/v2/ssh/collect_pixi.py | 4 +- fractal_server/tasks/v2/utils_background.py | 4 +- tests/fixtures_tasks_v2.py | 4 +- .../test_01_schemas/test_schemas_manifest.py | 60 ++++++++++--------- .../test_01_schemas/test_task_collection.py | 12 ++-- .../test_unit_json_schemas_v2.py | 4 +- .../test_api_task_collection_custom.py | 4 +- 14 files changed, 61 insertions(+), 59 deletions(-) diff --git a/fractal_server/app/schemas/v2/__init__.py b/fractal_server/app/schemas/v2/__init__.py index 47d90d9d9a..298064e5ee 100644 --- a/fractal_server/app/schemas/v2/__init__.py +++ b/fractal_server/app/schemas/v2/__init__.py @@ -20,8 +20,8 @@ from .job import JobRead # noqa F401 from .job import JobStatusType # noqa F401 from .job import JobUpdate # noqa F401 -from .manifest import Manifest # noqa F401 -from .manifest import TaskManifest # noqa F401 +from .manifest import ManifestV2 # noqa F401 +from .manifest import TaskManifestV2 # noqa F401 from .profile import ProfileCreate # noqa F401 from .profile import ProfileRead # noqa F401 from .profile import ValidProfileLocal # noqa F401 diff --git a/fractal_server/app/schemas/v2/manifest.py b/fractal_server/app/schemas/v2/manifest.py index abea66f921..6abfedb2c8 100644 --- a/fractal_server/app/schemas/v2/manifest.py +++ b/fractal_server/app/schemas/v2/manifest.py @@ -11,7 +11,7 @@ from .task import TaskType -class TaskManifest(BaseModel): +class TaskManifestV2(BaseModel): """ Represents a task within a manifest. 
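# Illustrative sketch (not part of the diff): how the renamed models compose,
# using only fields exercised by the schema tests later in this patch; real
# manifests typically carry more metadata (authors, docs links, schema version):
#
#   task = TaskManifestV2(
#       name="my-task",
#       executable_parallel="exec",
#       args_schema_parallel={"a": "b"},
#   )
#   manifest = ManifestV2(
#       manifest_version="2",    # only "2" validates; "1" raises ValidationError
#       has_args_schemas=True,   # then every task must ship its args schemas
#       task_list=[task],
#   )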
@@ -106,7 +106,7 @@ def validate_executable_args_meta(self): return self -class Manifest(BaseModel): +class ManifestV2(BaseModel): """ Packages containing tasks are required to include a special file `__FRACTAL_MANIFEST__.json` in order to be discovered and used by Fractal. @@ -131,7 +131,7 @@ class Manifest(BaseModel): """ manifest_version: Literal["2"] - task_list: list[TaskManifest] + task_list: list[TaskManifestV2] has_args_schemas: bool = False args_schema_version: str | None = None authors: NonEmptyStr | None = None diff --git a/fractal_server/app/schemas/v2/task_collection.py b/fractal_server/app/schemas/v2/task_collection.py index bc6da20861..6082b73ea9 100644 --- a/fractal_server/app/schemas/v2/task_collection.py +++ b/fractal_server/app/schemas/v2/task_collection.py @@ -5,7 +5,7 @@ from pydantic import field_validator from pydantic import model_validator -from fractal_server.app.schemas.v2 import Manifest +from fractal_server.app.schemas.v2 import ManifestV2 from fractal_server.string_tools import validate_cmd from fractal_server.types import AbsolutePathStr from fractal_server.types import DictStrStr @@ -110,7 +110,7 @@ class TaskCollectCustom(BaseModel): """ model_config = ConfigDict(extra="forbid") - manifest: Manifest + manifest: ManifestV2 python_interpreter: AbsolutePathStr label: NonEmptyStr package_root: AbsolutePathStr | None = None diff --git a/fractal_server/json_schemas/generate_manifest_v2.py b/fractal_server/json_schemas/generate_manifest_v2.py index 1c45fc6dc3..2ec53859e0 100644 --- a/fractal_server/json_schemas/generate_manifest_v2.py +++ b/fractal_server/json_schemas/generate_manifest_v2.py @@ -3,7 +3,7 @@ import fractal_server.app.schemas.v2 as v2 -new_schema = v2.manifest.Manifest.model_json_schema() +new_schema = v2.manifest.ManifestV2.model_json_schema() json_schema_path = ( Path(v2.__file__).parents[3] / "json_schemas/manifest_v2.json" ) diff --git a/fractal_server/tasks/v2/local/collect.py b/fractal_server/tasks/v2/local/collect.py index c649469e65..040499c2ff 100644 --- a/fractal_server/tasks/v2/local/collect.py +++ b/fractal_server/tasks/v2/local/collect.py @@ -11,7 +11,7 @@ from fractal_server.app.schemas.v2 import FractalUploadedFile from fractal_server.app.schemas.v2 import TaskGroupActivityAction from fractal_server.app.schemas.v2 import TaskGroupActivityStatus -from fractal_server.app.schemas.v2.manifest import Manifest +from fractal_server.app.schemas.v2.manifest import ManifestV2 from fractal_server.logger import reset_logger_handlers from fractal_server.logger import set_logger from fractal_server.tasks.utils import get_log_path @@ -215,7 +215,7 @@ def collect_local( pkg_manifest_dict = json.load(json_data) logger.info(f"loaded {manifest_path=}") logger.info("now validating manifest content") - pkg_manifest = Manifest(**pkg_manifest_dict) + pkg_manifest = ManifestV2(**pkg_manifest_dict) logger.info("validated manifest content") activity.log = get_current_log(log_file_path) activity = add_commit_refresh(obj=activity, db=db) diff --git a/fractal_server/tasks/v2/local/collect_pixi.py b/fractal_server/tasks/v2/local/collect_pixi.py index fb1049879a..44c9b70175 100644 --- a/fractal_server/tasks/v2/local/collect_pixi.py +++ b/fractal_server/tasks/v2/local/collect_pixi.py @@ -10,7 +10,7 @@ from fractal_server.app.schemas.v2 import FractalUploadedFile from fractal_server.app.schemas.v2 import TaskGroupActivityAction from fractal_server.app.schemas.v2 import TaskGroupActivityStatus -from fractal_server.app.schemas.v2.manifest import Manifest +from 
fractal_server.app.schemas.v2.manifest import ManifestV2 from fractal_server.logger import reset_logger_handlers from fractal_server.logger import set_logger from fractal_server.tasks.utils import get_log_path @@ -194,7 +194,7 @@ def collect_local_pixi( pkg_manifest_dict = json.load(json_data) logger.info(f"loaded {manifest_path=}") logger.info("now validating manifest content") - pkg_manifest = Manifest(**pkg_manifest_dict) + pkg_manifest = ManifestV2(**pkg_manifest_dict) logger.info("validated manifest content") activity.log = get_current_log(log_file_path) activity = add_commit_refresh(obj=activity, db=db) diff --git a/fractal_server/tasks/v2/ssh/collect.py b/fractal_server/tasks/v2/ssh/collect.py index 53d5e5d247..72832f3397 100644 --- a/fractal_server/tasks/v2/ssh/collect.py +++ b/fractal_server/tasks/v2/ssh/collect.py @@ -8,7 +8,7 @@ from fractal_server.app.schemas.v2 import FractalUploadedFile from fractal_server.app.schemas.v2 import TaskGroupActivityAction from fractal_server.app.schemas.v2 import TaskGroupActivityStatus -from fractal_server.app.schemas.v2.manifest import Manifest +from fractal_server.app.schemas.v2.manifest import ManifestV2 from fractal_server.logger import reset_logger_handlers from fractal_server.logger import set_logger from fractal_server.ssh._fabric import SingleUseFractalSSH @@ -254,7 +254,7 @@ def collect_ssh( manifest_path_remote ) logger.info(f"Loaded {manifest_path_remote=}") - pkg_manifest = Manifest(**pkg_manifest_dict) + pkg_manifest = ManifestV2(**pkg_manifest_dict) logger.info("Manifest is a valid ManifestV2") logger.info("_prepare_tasks_metadata - start") diff --git a/fractal_server/tasks/v2/ssh/collect_pixi.py b/fractal_server/tasks/v2/ssh/collect_pixi.py index 53b538a5bf..36efa5f4fe 100644 --- a/fractal_server/tasks/v2/ssh/collect_pixi.py +++ b/fractal_server/tasks/v2/ssh/collect_pixi.py @@ -8,7 +8,7 @@ from fractal_server.app.schemas.v2 import FractalUploadedFile from fractal_server.app.schemas.v2 import TaskGroupActivityAction from fractal_server.app.schemas.v2 import TaskGroupActivityStatus -from fractal_server.app.schemas.v2.manifest import Manifest +from fractal_server.app.schemas.v2.manifest import ManifestV2 from fractal_server.logger import reset_logger_handlers from fractal_server.logger import set_logger from fractal_server.ssh._fabric import SingleUseFractalSSH @@ -280,7 +280,7 @@ def collect_ssh_pixi( manifest_path_remote ) logger.info(f"Loaded {manifest_path_remote=}") - pkg_manifest = Manifest(**pkg_manifest_dict) + pkg_manifest = ManifestV2(**pkg_manifest_dict) logger.info("Manifest is a valid ManifestV2") logger.info("_prepare_tasks_metadata - start") diff --git a/fractal_server/tasks/v2/utils_background.py b/fractal_server/tasks/v2/utils_background.py index 500e28972e..453b803b41 100644 --- a/fractal_server/tasks/v2/utils_background.py +++ b/fractal_server/tasks/v2/utils_background.py @@ -8,7 +8,7 @@ from fractal_server.app.models.v2 import TaskGroupV2 from fractal_server.app.schemas.v2 import TaskCreate from fractal_server.app.schemas.v2 import TaskGroupActivityStatus -from fractal_server.app.schemas.v2.manifest import Manifest +from fractal_server.app.schemas.v2.manifest import ManifestV2 from fractal_server.app.schemas.v2.task_group import TaskGroupActivityAction from fractal_server.exceptions import UnreachableBranchError from fractal_server.logger import get_logger @@ -78,7 +78,7 @@ def fail_and_cleanup( def prepare_tasks_metadata( *, - package_manifest: Manifest, + package_manifest: ManifestV2, package_root: Path, 
python_bin: Path | None = None, project_python_wrapper: Path | None = None, diff --git a/tests/fixtures_tasks_v2.py b/tests/fixtures_tasks_v2.py index c396dd3291..02392f9fe0 100644 --- a/tests/fixtures_tasks_v2.py +++ b/tests/fixtures_tasks_v2.py @@ -17,7 +17,7 @@ from fractal_server.app.models.v2 import Resource from fractal_server.app.models.v2 import TaskGroupV2 from fractal_server.app.models.v2 import TaskV2 -from fractal_server.app.schemas.v2 import Manifest +from fractal_server.app.schemas.v2 import ManifestV2 from fractal_server.app.schemas.v2 import TaskCreate from fractal_server.app.schemas.v2 import TaskGroupCreate from fractal_server.tasks.v2.utils_background import prepare_tasks_metadata @@ -70,7 +70,7 @@ def fractal_tasks_mock_collection( with open(package_root / "__FRACTAL_MANIFEST__.json") as f: manifest_dict = json.load(f) - manifest = Manifest(**manifest_dict) + manifest = ManifestV2(**manifest_dict) task_list: list[TaskCreate] = prepare_tasks_metadata( package_manifest=manifest, python_bin=venv_python, diff --git a/tests/v2/test_01_schemas/test_schemas_manifest.py b/tests/v2/test_01_schemas/test_schemas_manifest.py index 3bfa5bb67d..aaa2b6f3a9 100644 --- a/tests/v2/test_01_schemas/test_schemas_manifest.py +++ b/tests/v2/test_01_schemas/test_schemas_manifest.py @@ -1,8 +1,8 @@ import pytest from pydantic import ValidationError -from fractal_server.app.schemas.v2.manifest import Manifest -from fractal_server.app.schemas.v2.manifest import TaskManifest +from fractal_server.app.schemas.v2.manifest import ManifestV2 +from fractal_server.app.schemas.v2.manifest import TaskManifestV2 def msg(e: pytest.ExceptionInfo) -> str: @@ -10,19 +10,19 @@ def msg(e: pytest.ExceptionInfo) -> str: def test_TaskManifestV2(): - assert TaskManifest(name="task", executable_parallel="exec") - assert TaskManifest(name="task", executable_non_parallel="exec") - assert TaskManifest( + assert TaskManifestV2(name="task", executable_parallel="exec") + assert TaskManifestV2(name="task", executable_non_parallel="exec") + assert TaskManifestV2( name="task", executable_parallel="exec", executable_non_parallel="exec" ) # 1: no executable with pytest.raises(ValidationError): - TaskManifest(name="task") + TaskManifestV2(name="task") # 2: parallel with non_parallel meta with pytest.raises(ValidationError) as e: - TaskManifest( + TaskManifestV2( name="task", executable_parallel="exec", meta_non_parallel={"a": "b"}, @@ -31,7 +31,7 @@ def test_TaskManifestV2(): # 3: parallel with non_parallel args_schema with pytest.raises(ValidationError) as e: - TaskManifest( + TaskManifestV2( name="task", executable_parallel="exec", args_schema_non_parallel={"a": "b"}, @@ -40,7 +40,7 @@ def test_TaskManifestV2(): # 4: non_parallel with parallel meta with pytest.raises(ValidationError) as e: - TaskManifest( + TaskManifestV2( name="task", executable_non_parallel="exec", meta_parallel={"a": "b"}, @@ -49,7 +49,7 @@ def test_TaskManifestV2(): # 5: non_parallel with parallel args_schema with pytest.raises(ValidationError) as e: - TaskManifest( + TaskManifestV2( name="task", executable_non_parallel="exec", args_schema_parallel={"a": "b"}, @@ -61,12 +61,12 @@ def test_TaskManifestV2(): ValidationError, match="Input should be a valid URL", ): - TaskManifest( + TaskManifestV2( name="task", executable_parallel="exec", docs_link="not-an-url", ) - TaskManifest( + TaskManifestV2( name="task", executable_parallel="exec", docs_link="https://url.com", @@ -74,50 +74,52 @@ def test_TaskManifestV2(): def test_ManifestV2(): - assert 
Manifest(manifest_version="2", task_list=[]) + assert ManifestV2(manifest_version="2", task_list=[]) - compound_both_schemas = TaskManifest( + compound_both_schemas = TaskManifestV2( name="task1", executable_parallel="exec", args_schema_parallel={"a": "b"}, executable_non_parallel="exec", args_schema_non_parallel={"a": "b"}, ) - compound_just_parallel_schemas = TaskManifest( + compound_just_parallel_schemas = TaskManifestV2( name="task2", executable_parallel="exec", args_schema_parallel={"a": "b"}, executable_non_parallel="exec", ) - compound_just_non_parallel_schemas = TaskManifest( + compound_just_non_parallel_schemas = TaskManifestV2( name="task3", executable_parallel="exec", executable_non_parallel="exec", args_schema_non_parallel={"a": "b"}, ) - compound_no_schemas = TaskManifest( + compound_no_schemas = TaskManifestV2( name="task4", executable_parallel="exec", executable_non_parallel="exec", ) - parallel_schema = TaskManifest( + parallel_schema = TaskManifestV2( name="task5", executable_parallel="exec", args_schema_parallel={"a": "b"}, ) - parallel_no_schema = TaskManifest(name="task6", executable_parallel="exec") + parallel_no_schema = TaskManifestV2( + name="task6", executable_parallel="exec" + ) - non_parallel_schema = TaskManifest( + non_parallel_schema = TaskManifestV2( name="task7", executable_non_parallel="exec", args_schema_non_parallel={"a": "b"}, ) - non_parallel_no_schema = TaskManifest( + non_parallel_no_schema = TaskManifestV2( name="task8", executable_non_parallel="exec" ) - assert Manifest( + assert ManifestV2( manifest_version="2", has_args_schemas=True, task_list=[ @@ -129,12 +131,12 @@ def test_ManifestV2(): # 1: invalid manifest_version with pytest.raises(ValidationError) as exc_info: - Manifest(manifest_version="1", task_list=[]) + ManifestV2(manifest_version="1", task_list=[]) print(exc_info.value) # 2: compound_just_parallel_schemas with pytest.raises(ValidationError) as e: - Manifest( + ManifestV2( manifest_version="2", has_args_schemas=True, task_list=[ @@ -147,7 +149,7 @@ # 3: compound_just_non_parallel_schemas with pytest.raises(ValidationError) as e: - Manifest( + ManifestV2( manifest_version="2", has_args_schemas=True, task_list=[ @@ -160,7 +162,7 @@ # 4: compound_no_schemas with pytest.raises(ValidationError) as e: - Manifest( + ManifestV2( manifest_version="2", has_args_schemas=True, task_list=[ @@ -173,7 +175,7 @@ # 5: parallel_no_schema with pytest.raises(ValidationError) as e: - Manifest( + ManifestV2( manifest_version="2", has_args_schemas=True, task_list=[ @@ -186,7 +188,7 @@ # 6: non_parallel_no_schema with pytest.raises(ValidationError) as e: - Manifest( + ManifestV2( manifest_version="2", has_args_schemas=True, task_list=[ @@ -199,7 +201,7 @@ # 7: Non-unique task names with pytest.raises(ValidationError) as e: - Manifest( + ManifestV2( manifest_version="2", has_args_schemas=True, task_list=[ diff --git a/tests/v2/test_01_schemas/test_task_collection.py b/tests/v2/test_01_schemas/test_task_collection.py index 422cbec185..3eacdf4451 100644 --- a/tests/v2/test_01_schemas/test_task_collection.py +++ b/tests/v2/test_01_schemas/test_task_collection.py @@ -3,7 +3,7 @@ import pytest from pydantic import ValidationError -from fractal_server.app.schemas.v2 import Manifest +from fractal_server.app.schemas.v2 import ManifestV2 from fractal_server.app.schemas.v2 import TaskCollectCustom from fractal_server.app.schemas.v2 import TaskCollectPip from
fractal_server.app.schemas.v2 import TaskGroupCreateStrict @@ -77,7 +77,7 @@ async def test_TaskCollectCustomV2(testdata_path): with pytest.raises(ValidationError) as e: TaskCollectCustom( - manifest=Manifest(**manifest_dict), + manifest=ManifestV2(**manifest_dict), python_interpreter="/a", label="b", package_root=None, @@ -87,7 +87,7 @@ async def test_TaskCollectCustomV2(testdata_path): with pytest.raises(ValidationError) as e: TaskCollectCustom( - manifest=Manifest(**manifest_dict), + manifest=ManifestV2(**manifest_dict), python_interpreter="a", label="name", package_root=None, @@ -97,7 +97,7 @@ async def test_TaskCollectCustomV2(testdata_path): with pytest.raises(ValidationError) as e: TaskCollectCustom( - manifest=Manifest(**manifest_dict), + manifest=ManifestV2(**manifest_dict), python_interpreter="/a", label="name", package_root="non_absolute_path", @@ -108,7 +108,7 @@ async def test_TaskCollectCustomV2(testdata_path): # Fail because neither 'package_root' nor 'package_name' with pytest.raises(ValidationError) as e: TaskCollectCustom( - manifest=Manifest(**manifest_dict), + manifest=ManifestV2(**manifest_dict), python_interpreter="/a", label="name", package_root=None, @@ -119,7 +119,7 @@ async def test_TaskCollectCustomV2(testdata_path): # Successful collection = TaskCollectCustom( - manifest=Manifest(**manifest_dict), + manifest=ManifestV2(**manifest_dict), python_interpreter=" /some/python ", label="b", package_root=" /somewhere ", diff --git a/tests/v2/test_01_schemas/test_unit_json_schemas_v2.py b/tests/v2/test_01_schemas/test_unit_json_schemas_v2.py index 4db68cc368..1de4b814e3 100644 --- a/tests/v2/test_01_schemas/test_unit_json_schemas_v2.py +++ b/tests/v2/test_01_schemas/test_unit_json_schemas_v2.py @@ -2,7 +2,7 @@ from pathlib import Path import fractal_server.app.schemas.v2 as v2 -from fractal_server.app.schemas.v2.manifest import Manifest +from fractal_server.app.schemas.v2.manifest import ManifestV2 def test_ManifestV2_jsonschema(): @@ -15,5 +15,5 @@ def test_ManifestV2_jsonschema(): ) with json_schema_path.open("r") as f: current_schema = json.load(f) - new_schema = Manifest.model_json_schema() + new_schema = ManifestV2.model_json_schema() assert new_schema == current_schema diff --git a/tests/v2/test_06_tasks_lifecycle/test_api_task_collection_custom.py b/tests/v2/test_06_tasks_lifecycle/test_api_task_collection_custom.py index 57bd0c0a1b..79e2feaaad 100644 --- a/tests/v2/test_06_tasks_lifecycle/test_api_task_collection_custom.py +++ b/tests/v2/test_06_tasks_lifecycle/test_api_task_collection_custom.py @@ -1,7 +1,7 @@ import json import sys -from fractal_server.app.schemas.v2 import Manifest +from fractal_server.app.schemas.v2 import ManifestV2 from fractal_server.app.schemas.v2 import ResourceType from fractal_server.app.schemas.v2 import TaskCollectCustom @@ -137,7 +137,7 @@ async def test_task_collection_custom_fail_with_ssh( res = await client.post( f"{PREFIX}/collect/custom/", json=TaskCollectCustom( - manifest=Manifest(**manifest_dict), + manifest=ManifestV2(**manifest_dict), python_interpreter="/may/not/exist", label="label", package_root=None, From bcf055d25073e9ed9243090e8b062be64c687f55 Mon Sep 17 00:00:00 2001 From: Yuri Chiucconi Date: Fri, 28 Nov 2025 16:02:01 +0100 Subject: [PATCH 04/13] remove v2 from function names --- .../app/routes/api/v2/_aux_functions.py | 2 +- fractal_server/app/routes/api/v2/submit.py | 6 +- fractal_server/runner/v2/_local.py | 4 +- fractal_server/runner/v2/_slurm_ssh.py | 4 +- fractal_server/runner/v2/_slurm_sudo.py | 4 +- 
fractal_server/runner/v2/runner.py | 14 +- fractal_server/runner/v2/runner_functions.py | 12 +- tests/fixtures_server_v2.py | 16 +- tests/fixtures_tasks_v2.py | 2 +- tests/no_version/test_unit_lifespan.py | 22 +- .../test_01_schemas/test_schemas_dataset.py | 2 +- .../test_01_schemas/test_schemas_workflow.py | 4 +- .../test_01_schemas/test_task_collection.py | 6 +- .../test_unit_json_schemas_v2.py | 2 +- tests/v2/test_02_models/test_tasks_v2.py | 2 +- tests/v2/test_03_api/admin/test_admin_job.py | 106 ++++--- .../v2/test_03_api/admin/test_admin_others.py | 18 +- .../test_03_api/admin/test_admin_taskgroup.py | 60 ++-- tests/v2/test_03_api/test_api_dataset.py | 66 ++--- .../v2/test_03_api/test_api_dataset_images.py | 48 ++-- tests/v2/test_03_api/test_api_history.py | 28 +- tests/v2/test_03_api/test_api_job.py | 242 ++++++++-------- tests/v2/test_03_api/test_api_project.py | 50 ++-- tests/v2/test_03_api/test_api_sharing.py | 18 +- tests/v2/test_03_api/test_api_task.py | 22 +- tests/v2/test_03_api/test_api_task_group.py | 34 ++- .../test_api_verify_image_types.py | 44 +-- tests/v2/test_03_api/test_api_workflow.py | 106 ++++--- .../test_api_workflow_import_export.py | 50 ++-- .../v2/test_03_api/test_api_workflow_task.py | 98 ++++--- .../test_get_task_group_read_access.py | 8 +- .../test_loss_of_access_to_task.py | 18 +- tests/v2/test_03_api/test_status_legacy.py | 26 +- .../test_submission_job_list_v2.py | 30 +- .../test_03_api/test_task_version_update.py | 34 ++- .../v2/test_03_api/test_unit_aux_functions.py | 80 +++--- .../test_unit_aux_functions_tasks.py | 12 +- tests/v2/test_03_api/test_unit_timezone.py | 4 +- tests/v2/test_04_runner/execute_tasks_v2.py | 6 +- .../v2/test_04_runner/test_dummy_examples.py | 264 +++++++++--------- .../test_04_runner/test_fractal_examples.py | 216 +++++++------- .../test_no_images_parallelization.py | 34 +-- tests/v2/test_04_runner/test_unit_db_tools.py | 24 +- .../test_unit_submit_workflow.py | 60 ++-- .../test_api_task_lifecycle.py | 42 +-- .../test_unit_aux_functions_task_lifecycle.py | 12 +- .../test_full_workflow_local.py | 84 +++--- .../test_full_workflow_slurm_ssh.py | 32 +-- .../test_full_workflow_slurm_sudo.py | 64 ++--- tests/v2/test_08_backends/aux_unit_runner.py | 24 +- tests/v2/test_09_history/test_history_api.py | 186 ++++++------ .../test_unit_api_aux_functions.py | 18 +- tests/v2/test_09_history/test_unit_upsert.py | 24 +- 53 files changed, 1177 insertions(+), 1217 deletions(-) diff --git a/fractal_server/app/routes/api/v2/_aux_functions.py b/fractal_server/app/routes/api/v2/_aux_functions.py index 1d49c1b384..c35654db46 100644 --- a/fractal_server/app/routes/api/v2/_aux_functions.py +++ b/fractal_server/app/routes/api/v2/_aux_functions.py @@ -453,7 +453,7 @@ async def _workflow_insert_task( return wf_task -async def clean_app_job_list_v2( +async def clean_app_job_list( db: AsyncSession, jobs_list: list[int] ) -> list[int]: """ diff --git a/fractal_server/app/routes/api/v2/submit.py b/fractal_server/app/routes/api/v2/submit.py index 54a65cf411..04d351d471 100644 --- a/fractal_server/app/routes/api/v2/submit.py +++ b/fractal_server/app/routes/api/v2/submit.py @@ -38,7 +38,7 @@ from ._aux_functions import _get_dataset_check_access from ._aux_functions import _get_workflow_check_access -from ._aux_functions import clean_app_job_list_v2 +from ._aux_functions import clean_app_job_list from ._aux_functions_tasks import _check_type_filters_compatibility FRACTAL_CACHE_DIR = ".fractal_cache" @@ -68,9 +68,7 @@ async def apply_workflow( # somewhat slow. 
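# (Descriptive note, inferred from the call below: `app.state.jobsV2` holds the
# IDs of jobs submitted through this app instance; once it outgrows
# FRACTAL_API_MAX_JOB_LIST_LENGTH, `clean_app_job_list` checks the DB and keeps
# only the jobs that are still in a submitted state.)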
settings = Inject(get_settings) if len(request.app.state.jobsV2) > settings.FRACTAL_API_MAX_JOB_LIST_LENGTH: - new_jobs_list = await clean_app_job_list_v2( - db, request.app.state.jobsV2 - ) + new_jobs_list = await clean_app_job_list(db, request.app.state.jobsV2) request.app.state.jobsV2 = new_jobs_list output = await _get_dataset_check_access( diff --git a/fractal_server/runner/v2/_local.py b/fractal_server/runner/v2/_local.py index a8d8e86950..2860718cc8 100644 --- a/fractal_server/runner/v2/_local.py +++ b/fractal_server/runner/v2/_local.py @@ -14,7 +14,7 @@ from fractal_server.ssh._fabric import FractalSSH from fractal_server.types import AttributeFilters -from .runner import execute_tasks_v2 +from .runner import execute_tasks def process_workflow( @@ -90,7 +90,7 @@ def process_workflow( resource=resource, profile=profile, ) as runner: - execute_tasks_v2( + execute_tasks( wf_task_list=workflow.task_list[ first_task_index : (last_task_index + 1) ], diff --git a/fractal_server/runner/v2/_slurm_ssh.py b/fractal_server/runner/v2/_slurm_ssh.py index 7551d8c7e9..e33f86f755 100644 --- a/fractal_server/runner/v2/_slurm_ssh.py +++ b/fractal_server/runner/v2/_slurm_ssh.py @@ -33,7 +33,7 @@ from fractal_server.ssh._fabric import FractalSSH from fractal_server.types import AttributeFilters -from .runner import execute_tasks_v2 +from .runner import execute_tasks logger = set_logger(__name__) @@ -113,7 +113,7 @@ def process_workflow( common_script_lines=worker_init, user_cache_dir=user_cache_dir, ) as runner: - execute_tasks_v2( + execute_tasks( wf_task_list=workflow.task_list[ first_task_index : (last_task_index + 1) ], diff --git a/fractal_server/runner/v2/_slurm_sudo.py b/fractal_server/runner/v2/_slurm_sudo.py index 299c14dbb6..5eeae4aa44 100644 --- a/fractal_server/runner/v2/_slurm_sudo.py +++ b/fractal_server/runner/v2/_slurm_sudo.py @@ -32,7 +32,7 @@ from fractal_server.ssh._fabric import FractalSSH from fractal_server.types import AttributeFilters -from .runner import execute_tasks_v2 +from .runner import execute_tasks def process_workflow( @@ -109,7 +109,7 @@ def process_workflow( user_cache_dir=user_cache_dir, slurm_account=slurm_account, ) as runner: - execute_tasks_v2( + execute_tasks( wf_task_list=workflow.task_list[ first_task_index : (last_task_index + 1) ], diff --git a/fractal_server/runner/v2/runner.py b/fractal_server/runner/v2/runner.py index 73f3bfdad6..09eaabd84d 100644 --- a/fractal_server/runner/v2/runner.py +++ b/fractal_server/runner/v2/runner.py @@ -35,9 +35,9 @@ from .merge_outputs import merge_outputs from .runner_functions import GetRunnerConfigType from .runner_functions import SubmissionOutcome -from .runner_functions import run_v2_task_compound -from .runner_functions import run_v2_task_non_parallel -from .runner_functions import run_v2_task_parallel +from .runner_functions import run_task_compound +from .runner_functions import run_task_non_parallel +from .runner_functions import run_task_parallel from .task_interface import TaskOutput @@ -82,7 +82,7 @@ def get_origin_attribute_and_types( return updated_attributes, updated_types -def execute_tasks_v2( +def execute_tasks( *, wf_task_list: list[WorkflowTaskV2], dataset: DatasetV2, @@ -224,7 +224,7 @@ def execute_tasks_v2( TaskType.NON_PARALLEL, TaskType.CONVERTER_NON_PARALLEL, ]: - outcomes_dict, num_tasks = run_v2_task_non_parallel( + outcomes_dict, num_tasks = run_task_non_parallel( images=filtered_images, zarr_dir=zarr_dir, wftask=wftask, @@ -239,7 +239,7 @@ def execute_tasks_v2( user_id=user_id, ) elif task.type 
== TaskType.PARALLEL: - outcomes_dict, num_tasks = run_v2_task_parallel( + outcomes_dict, num_tasks = run_task_parallel( images=filtered_images, wftask=wftask, task=task, @@ -255,7 +255,7 @@ def execute_tasks_v2( TaskType.COMPOUND, TaskType.CONVERTER_COMPOUND, ]: - outcomes_dict, num_tasks = run_v2_task_compound( + outcomes_dict, num_tasks = run_task_compound( images=filtered_images, zarr_dir=zarr_dir, wftask=wftask, diff --git a/fractal_server/runner/v2/runner_functions.py b/fractal_server/runner/v2/runner_functions.py index d8790b13a5..8d5f41ca19 100644 --- a/fractal_server/runner/v2/runner_functions.py +++ b/fractal_server/runner/v2/runner_functions.py @@ -64,9 +64,9 @@ def __call__( __all__ = [ - "run_v2_task_parallel", - "run_v2_task_non_parallel", - "run_v2_task_compound", + "run_task_parallel", + "run_task_non_parallel", + "run_task_compound", ] @@ -145,7 +145,7 @@ def _check_parallelization_list_size(my_list): ) -def run_v2_task_non_parallel( +def run_task_non_parallel( *, images: list[dict[str, Any]], zarr_dir: str, @@ -265,7 +265,7 @@ def run_v2_task_non_parallel( return outcome, num_tasks -def run_v2_task_parallel( +def run_task_parallel( *, images: list[dict[str, Any]], task: TaskV2, @@ -388,7 +388,7 @@ def run_v2_task_parallel( return outcome, num_tasks -def run_v2_task_compound( +def run_task_compound( *, images: list[dict[str, Any]], zarr_dir: str, diff --git a/tests/fixtures_server_v2.py b/tests/fixtures_server_v2.py index b088eae903..d02d1e957d 100644 --- a/tests/fixtures_server_v2.py +++ b/tests/fixtures_server_v2.py @@ -38,7 +38,7 @@ @pytest.fixture -async def project_factory_v2(db): +async def project_factory(db): """ Factory that adds a ProjectV2 to the database """ @@ -79,12 +79,12 @@ async def __project_factory(user, **kwargs): @pytest.fixture -async def dataset_factory_v2(db: AsyncSession, tmp_path): +async def dataset_factory(db: AsyncSession, tmp_path): """ Insert DatasetV2 in db """ - async def __dataset_factory_v2(db: AsyncSession = db, **kwargs): + async def __dataset_factory(db: AsyncSession = db, **kwargs): defaults = dict( name="My Dataset", project_id=1, @@ -112,11 +112,11 @@ async def __dataset_factory_v2(db: AsyncSession = db, **kwargs): await db.refresh(_dataset) return _dataset - return __dataset_factory_v2 + return __dataset_factory @pytest.fixture -async def workflow_factory_v2(db: AsyncSession): +async def workflow_factory(db: AsyncSession): """ Insert WorkflowV2 in db """ @@ -147,7 +147,7 @@ async def __workflow_factory(db: AsyncSession = db, **kwargs): @pytest.fixture -async def job_factory_v2(db: AsyncSession): +async def job_factory(db: AsyncSession): """ Insert JobV2 in db """ @@ -216,7 +216,7 @@ async def __job_factory( @pytest.fixture -async def task_factory_v2(db: AsyncSession): +async def task_factory(db: AsyncSession): """ Insert TaskV2 in db """ @@ -330,7 +330,7 @@ async def __task_factory( @pytest.fixture -async def workflowtask_factory_v2(db: AsyncSession): +async def workflowtask_factory(db: AsyncSession): """ Insert workflowtaskv2 in db """ diff --git a/tests/fixtures_tasks_v2.py b/tests/fixtures_tasks_v2.py index 02392f9fe0..49ca6264f1 100644 --- a/tests/fixtures_tasks_v2.py +++ b/tests/fixtures_tasks_v2.py @@ -121,7 +121,7 @@ def fractal_tasks_mock_db( @pytest.fixture(scope="function") -def relink_python_interpreter_v2( +def relink_python_interpreter( fractal_tasks_mock_collection, current_py_version: str ): """ diff --git a/tests/no_version/test_unit_lifespan.py b/tests/no_version/test_unit_lifespan.py index 6ecd90dc38..552317c436 
100644 --- a/tests/no_version/test_unit_lifespan.py +++ b/tests/no_version/test_unit_lifespan.py @@ -21,11 +21,11 @@ async def test_app_with_lifespan( db, override_settings_factory, - task_factory_v2, - project_factory_v2, - workflow_factory_v2, - dataset_factory_v2, - job_factory_v2, + task_factory, + project_factory, + workflow_factory, + dataset_factory, + job_factory, tmp_path, local_resource_profile_db, ): @@ -57,17 +57,15 @@ async def test_app_with_lifespan( # verify shutdown assert len(app.state.jobsV2) == 0 - task = await task_factory_v2( - user_id=user.id, name="task", command="echo" - ) - project = await project_factory_v2(user) - workflow = await workflow_factory_v2(project_id=project.id) - dataset1 = await dataset_factory_v2(project_id=project.id, name="ds-1") + task = await task_factory(user_id=user.id, name="task", command="echo") + project = await project_factory(user) + workflow = await workflow_factory(project_id=project.id) + dataset1 = await dataset_factory(project_id=project.id, name="ds-1") await _workflow_insert_task_v2( workflow_id=workflow.id, task_id=task.id, db=db ) # Create jobv2 with submitted status - jobv2 = await job_factory_v2( + jobv2 = await job_factory( project_id=project.id, workflow_id=workflow.id, dataset_id=dataset1.id, diff --git a/tests/v2/test_01_schemas/test_schemas_dataset.py b/tests/v2/test_01_schemas/test_schemas_dataset.py index 0df66f1d3d..1bdff4bde7 100644 --- a/tests/v2/test_01_schemas/test_schemas_dataset.py +++ b/tests/v2/test_01_schemas/test_schemas_dataset.py @@ -10,7 +10,7 @@ from fractal_server.urls import normalize_url -async def test_schemas_dataset_v2(): +async def test_schemas_dataset(): project = ProjectV2(id=1, name="project") # Test zarr_dir=None is valid diff --git a/tests/v2/test_01_schemas/test_schemas_workflow.py b/tests/v2/test_01_schemas/test_schemas_workflow.py index 72168c69ce..8c04558dea 100644 --- a/tests/v2/test_01_schemas/test_schemas_workflow.py +++ b/tests/v2/test_01_schemas/test_schemas_workflow.py @@ -10,7 +10,7 @@ from fractal_server.app.schemas.v2 import WorkflowUpdate -async def test_schemas_workflow_v2(): +async def test_schemas_workflow(): project = ProjectV2(id=1, name="project") # Create @@ -42,7 +42,7 @@ async def test_schemas_workflow_v2(): WorkflowUpdate(name="new name", reordered_workflowtask_ids=[1, 2, 3]) -async def test_schemas_workflow_task_v2(): +async def test_schemas_workflow_task(): for attribute in ("args_parallel", "args_non_parallel"): WorkflowTaskCreate(**{attribute: dict(something="else")}) diff --git a/tests/v2/test_01_schemas/test_task_collection.py b/tests/v2/test_01_schemas/test_task_collection.py index 3eacdf4451..c2053b5c4f 100644 --- a/tests/v2/test_01_schemas/test_task_collection.py +++ b/tests/v2/test_01_schemas/test_task_collection.py @@ -10,7 +10,7 @@ from fractal_server.app.schemas.v2 import TaskGroupOriginEnum -def test_TaskCollectPipV2(): +def test_TaskCollectPip(): """ Check that leading/trailing whitespace characters were removed """ @@ -65,7 +65,7 @@ def test_TaskCollectPipV2(): ) -async def test_TaskCollectCustomV2(testdata_path): +async def test_TaskCollectCustom(testdata_path): manifest_file = ( testdata_path.parent / "v2/fractal_tasks_mock" @@ -130,7 +130,7 @@ async def test_TaskCollectCustomV2(testdata_path): assert collection.package_root == "/somewhere" -def test_TaskGroupCreateV2Strict(): +def test_TaskGroupCreateStrict(): # Success TaskGroupCreateStrict( path="/a", diff --git a/tests/v2/test_01_schemas/test_unit_json_schemas_v2.py 
b/tests/v2/test_01_schemas/test_unit_json_schemas_v2.py index 1de4b814e3..41799e1a97 100644 --- a/tests/v2/test_01_schemas/test_unit_json_schemas_v2.py +++ b/tests/v2/test_01_schemas/test_unit_json_schemas_v2.py @@ -5,7 +5,7 @@ from fractal_server.app.schemas.v2.manifest import ManifestV2 -def test_ManifestV2_jsonschema(): +def test_Manifest_jsonschema(): """ Generate a JSON Schema from the ManifestV2 Pydantic model, and compare it with the one currently present in the repository. diff --git a/tests/v2/test_02_models/test_tasks_v2.py b/tests/v2/test_02_models/test_tasks_v2.py index 2d6a4c36a0..706d47bc22 100644 --- a/tests/v2/test_02_models/test_tasks_v2.py +++ b/tests/v2/test_02_models/test_tasks_v2.py @@ -8,7 +8,7 @@ from fractal_server.app.schemas.v2 import TaskGroupActivityStatus -async def test_task_group_v2(db, local_resource_profile_db): +async def test_task_group(db, local_resource_profile_db): resource, profile = local_resource_profile_db user = UserOAuth( email="user@example.org", diff --git a/tests/v2/test_03_api/admin/test_admin_job.py b/tests/v2/test_03_api/admin/test_admin_job.py index 7a7888deca..b775a448cb 100644 --- a/tests/v2/test_03_api/admin/test_admin_job.py +++ b/tests/v2/test_03_api/admin/test_admin_job.py @@ -28,22 +28,20 @@ async def test_view_job( client, MockCurrentUser, tmp_path, - project_factory_v2, - workflow_factory_v2, - dataset_factory_v2, - task_factory_v2, - job_factory_v2, + project_factory, + workflow_factory, + dataset_factory, + task_factory, + job_factory, ): async with MockCurrentUser(user_kwargs={"is_superuser": False}) as user: - project = await project_factory_v2(user) + project = await project_factory(user) - workflow1 = await workflow_factory_v2(project_id=project.id) - workflow2 = await workflow_factory_v2(project_id=project.id) + workflow1 = await workflow_factory(project_id=project.id) + workflow2 = await workflow_factory(project_id=project.id) - task = await task_factory_v2(
user_id=user.id, name="task", source="source" - ) - dataset = await dataset_factory_v2(project_id=project.id) + task = await task_factory(user_id=user.id, name="task", source="source") + dataset = await dataset_factory(project_id=project.id) await _workflow_insert_task( workflow_id=workflow1.id, task_id=task.id, db=db @@ -215,7 +211,7 @@ async def test_view_single_job( workflow_id=workflow2.id, task_id=task.id, db=db ) - job = await job_factory_v2( + job = await job_factory( working_dir=tmp_path.as_posix(), project_id=project.id, dataset_id=dataset.id, @@ -247,11 +243,11 @@ async def test_view_single_job( async def test_patch_job( MockCurrentUser, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - job_factory_v2, - task_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + job_factory, + task_factory, client, registered_superuser_client, db, @@ -261,23 +257,21 @@ async def test_patch_job( NEW_STATUS = JobStatusType.FAILED async with MockCurrentUser() as user: - project = await project_factory_v2(user) - workflow = await workflow_factory_v2(project_id=project.id) - task = await task_factory_v2( - user_id=user.id, name="task", source="source" - ) + project = await project_factory(user) + workflow = await workflow_factory(project_id=project.id) + task = await task_factory(user_id=user.id, name="task", source="source") await _workflow_insert_task( workflow_id=workflow.id, task_id=task.id, db=db ) - dataset = await dataset_factory_v2(project_id=project.id) - job = await job_factory_v2( + dataset = await dataset_factory(project_id=project.id) + job = await job_factory( working_dir=tmp_path.as_posix(), project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, status=ORIGINAL_STATUS, ) - job_done = await job_factory_v2( + job_done = await job_factory( working_dir=tmp_path.as_posix(), project_id=project.id, dataset_id=dataset.id, @@ -413,18 +407,18 @@ async def test_stop_job_slurm( MockCurrentUser, client, db, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, + project_factory, + dataset_factory, + workflow_factory, tmp_path, override_settings_factory, ): override_settings_factory(FRACTAL_RUNNER_BACKEND=backend) async with MockCurrentUser(user_kwargs=dict(is_superuser=True)) as user: - project = await project_factory_v2(user) - workflow = await workflow_factory_v2(project_id=project.id) - dataset = await dataset_factory_v2(project_id=project.id) + project = await project_factory(user) + workflow = await workflow_factory(project_id=project.id) + dataset = await dataset_factory(project_id=project.id) job = JobV2( working_dir=tmp_path.as_posix(), project_id=project.id, @@ -457,24 +451,24 @@ async def test_stop_job_slurm( async def test_download_job_logs( client, MockCurrentUser, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - job_factory_v2, - task_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + job_factory, + task_factory, db, tmp_path, ): async with MockCurrentUser() as user: - prj = await project_factory_v2(user) - dataset = await dataset_factory_v2(project_id=prj.id, name="dataset") - workflow = await workflow_factory_v2(project_id=prj.id) - task = await task_factory_v2(user_id=user.id) + prj = await project_factory(user) + dataset = await dataset_factory(project_id=prj.id, name="dataset") + workflow = await workflow_factory(project_id=prj.id) + task = await task_factory(user_id=user.id) await _workflow_insert_task( workflow_id=workflow.id, task_id=task.id, db=db ) working_dir = (tmp_path 
/ "working_dir_for_zipping").as_posix() - job = await job_factory_v2( + job = await job_factory( project_id=prj.id, workflow_id=workflow.id, working_dir=working_dir, diff --git a/tests/v2/test_03_api/admin/test_admin_others.py b/tests/v2/test_03_api/admin/test_admin_others.py index f414e85e59..52405ead26 100644 --- a/tests/v2/test_03_api/admin/test_admin_others.py +++ b/tests/v2/test_03_api/admin/test_admin_others.py @@ -22,17 +22,17 @@ async def test_task_query( db, client, MockCurrentUser, - project_factory_v2, - workflow_factory_v2, - task_factory_v2, + project_factory, + workflow_factory, + task_factory, ): async with MockCurrentUser(user_kwargs={"is_superuser": True}) as user: - project = await project_factory_v2(user) + project = await project_factory(user) - workflow1 = await workflow_factory_v2(project_id=project.id) - workflow2 = await workflow_factory_v2(project_id=project.id) + workflow1 = await workflow_factory(project_id=project.id) + workflow2 = await workflow_factory(project_id=project.id) - task1 = await task_factory_v2( + task1 = await task_factory( user_id=user.id, name="Foo", source="xxx", @@ -40,7 +40,7 @@ async def test_task_query( modality="HCS", authors="Name1 Surname1,Name2 Surname2...", ) - task2 = await task_factory_v2( + task2 = await task_factory( user_id=user.id, name="abcdef", source="yyy", @@ -48,7 +48,7 @@ async def test_task_query( modality="EM", authors="Name1 Surname3,Name3 Surname2...", ) - task3 = await task_factory_v2( + task3 = await task_factory( user_id=user.id, index=3, source="source3", modality="EM" ) diff --git a/tests/v2/test_03_api/admin/test_admin_taskgroup.py b/tests/v2/test_03_api/admin/test_admin_taskgroup.py index 5206bec4b0..4e969b7e9b 100644 --- a/tests/v2/test_03_api/admin/test_admin_taskgroup.py +++ b/tests/v2/test_03_api/admin/test_admin_taskgroup.py @@ -22,13 +22,13 @@ async def test_task_group_admin( db, client, MockCurrentUser, - project_factory_v2, - workflow_factory_v2, - workflowtask_factory_v2, - task_factory_v2, + project_factory, + workflow_factory, + workflowtask_factory, + task_factory, ): async with MockCurrentUser() as user1: - task1 = await task_factory_v2( + task1 = await task_factory( user_id=user1.id, name="AaAa", ) @@ -37,7 +37,7 @@ async def test_task_group_admin( assert "resource_id" not in task_group_1 - task2 = await task_factory_v2( + task2 = await task_factory( name="BBB", user_id=user1.id, task_group_kwargs=dict(active=False), @@ -53,7 +53,7 @@ async def test_task_group_admin( debug(task_group_2) async with MockCurrentUser() as user2: - task3 = await task_factory_v2(user_id=user2.id, name="bbbbbbbb") + task3 = await task_factory(user_id=user2.id, name="bbbbbbbb") res = await client.get(f"/api/v2/task-group/{task3.taskgroupv2_id}/") task_group_3 = res.json() assert "resource_id" not in task_group_3 @@ -195,7 +195,7 @@ async def test_task_group_admin( async def test_get_task_group_activity( - client, MockCurrentUser, db, task_factory_v2 + client, MockCurrentUser, db, task_factory ): async with MockCurrentUser() as user1: activity1 = TaskGroupActivityV2( @@ -213,7 +213,7 @@ async def test_get_task_group_activity( action=TaskGroupActivityAction.REACTIVATE, ) async with MockCurrentUser() as user2: - task = await task_factory_v2(user_id=user2.id) + task = await task_factory(user_id=user2.id) activity3 = TaskGroupActivityV2( user_id=user2.id, pkg_name="foo", @@ -331,7 +331,7 @@ async def test_admin_deactivate_task_group_api( client, MockCurrentUser, db, - task_factory_v2, + task_factory, FRACTAL_RUNNER_BACKEND, 
override_settings_factory, local_resource_profile_db, @@ -354,15 +354,15 @@ async def test_admin_deactivate_task_group_api( user_kwargs=dict(profile_id=profile.id), ) as user: # Create mock task groups - non_active_task = await task_factory_v2( + non_active_task = await task_factory( user_id=user.id, name="task", task_group_kwargs=dict(active=False) ) - task_other = await task_factory_v2( + task_other = await task_factory( user_id=user.id, version=None, name="task", ) - task_pypi = await task_factory_v2( + task_pypi = await task_factory( user_id=user.id, name="task", version="1.2.3", @@ -431,7 +431,7 @@ async def test_reactivate_task_group_api( client, MockCurrentUser, db, - task_factory_v2, + task_factory, current_py_version, FRACTAL_RUNNER_BACKEND, override_settings_factory, @@ -450,16 +450,16 @@ async def test_reactivate_task_group_api( async with MockCurrentUser(user_kwargs=dict(profile_id=profile.id)) as user: # Create mock task groups - active_task = await task_factory_v2(user_id=user.id, name="task") + active_task = await task_factory(user_id=user.id, name="task") - task_other = await task_factory_v2( + task_other = await task_factory( user_id=user.id, version=None, name="task", task_group_kwargs=dict(active=False), ) - task_pypi = await task_factory_v2( + task_pypi = await task_factory( user_id=user.id, name="task", version="1.2.3", @@ -532,26 +532,26 @@ async def test_lifecycle_actions_with_submitted_jobs( db, client, MockCurrentUser, - task_factory_v2, - project_factory_v2, - workflow_factory_v2, - dataset_factory_v2, + task_factory, + project_factory, + workflow_factory, + dataset_factory, ): async with MockCurrentUser() as user: # Create mock task groups - active_task = await task_factory_v2( + active_task = await task_factory( user_id=user.id, name="task-active", task_group_kwargs=dict(active=True), ) - non_active_task = await task_factory_v2( + non_active_task = await task_factory( user_id=user.id, name="task-non-active", task_group_kwargs=dict(active=False), ) - p = await project_factory_v2(user=user) - wf = await workflow_factory_v2() - ds = await dataset_factory_v2() + p = await project_factory(user=user) + wf = await workflow_factory() + ds = await dataset_factory() for task in [active_task, non_active_task]: await _workflow_insert_task( workflow_id=wf.id, @@ -593,13 +593,13 @@ async def test_lifecycle_actions_with_submitted_jobs( async def test_admin_delete_task_group_api_local( client, MockCurrentUser, - task_factory_v2, + task_factory, local_resource_profile_db, ): resource, profile = local_resource_profile_db async with MockCurrentUser(user_kwargs=dict(profile_id=profile.id)) as user: - task = await task_factory_v2(user_id=user.id, name="task-name") + task = await task_factory(user_id=user.id, name="task-name") res = await client.get(f"/api/v2/task-group/{task.taskgroupv2_id}/") task_group_id = res.json()["id"] @@ -628,14 +628,14 @@ async def test_admin_delete_task_group_api_ssh( MockCurrentUser, app, tmp777_path, - task_factory_v2, + task_factory, fractal_ssh_list, slurm_ssh_resource_profile_db, ): app.state.fractal_ssh_list = fractal_ssh_list resource, profile = slurm_ssh_resource_profile_db[:] async with MockCurrentUser(user_kwargs=dict(profile_id=profile.id)) as user: - task = await task_factory_v2(user_id=user.id, name="task-name") + task = await task_factory(user_id=user.id, name="task-name") res = await client.get(f"/api/v2/task-group/{task.taskgroupv2_id}/") task_group_id = res.json()["id"] diff --git a/tests/v2/test_03_api/test_api_dataset.py 
b/tests/v2/test_03_api/test_api_dataset.py index 6d9dfcd49b..864065dd66 100644 --- a/tests/v2/test_03_api/test_api_dataset.py +++ b/tests/v2/test_03_api/test_api_dataset.py @@ -33,7 +33,7 @@ def n_images(n: int) -> list[dict]: ] -async def test_new_dataset_v2( +async def test_new_dataset( client, MockCurrentUser, local_resource_profile_db, @@ -109,9 +109,9 @@ assert len(res.json()) == 1 -async def test_get_dataset(client, MockCurrentUser, project_factory_v2): +async def test_get_dataset(client, MockCurrentUser, project_factory): async with MockCurrentUser() as user: - project = await project_factory_v2(user) + project = await project_factory(user) p_id = project.id # Create dataset DATASET_NAME = "My Dataset" @@ -150,9 +150,9 @@ debug(datasets[0]["timestamp_created"]) -async def test_post_dataset(client, MockCurrentUser, project_factory_v2): +async def test_post_dataset(client, MockCurrentUser, project_factory): async with MockCurrentUser() as user: - prj = await project_factory_v2(user) + prj = await project_factory(user) # Check that zarr_dir must be relative to one of user's project dirs res = await client.post( @@ -207,7 +207,7 @@ async with MockCurrentUser( user_kwargs={"project_dirs": ["/some/dir"]} ) as user: - prj = await project_factory_v2(user) + prj = await project_factory(user) res = await client.post( f"{PREFIX}/project/{prj.id}/dataset/", json=dict(name="DSName") ) @@ -220,12 +220,12 @@ async def test_delete_dataset( - client, MockCurrentUser, project_factory_v2, dataset_factory_v2 + client, MockCurrentUser, project_factory, dataset_factory ): async with MockCurrentUser() as user: - prj = await project_factory_v2(user) - ds0 = await dataset_factory_v2(project_id=prj.id) - ds1 = await dataset_factory_v2(project_id=prj.id) + prj = await project_factory(user) + ds0 = await dataset_factory(project_id=prj.id) + ds1 = await dataset_factory(project_id=prj.id) ds_ids = (ds0.id, ds1.id) @@ -250,11 +250,11 @@ async def test_delete_dataset_cascade_jobs( db, MockCurrentUser, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - task_factory_v2, - job_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + task_factory, + job_factory, tmp_path, client, ): @@ -266,16 +266,16 @@ """ async with MockCurrentUser() as user: # Populate the database with the appropriate objects - project = await project_factory_v2(user) - workflow = await workflow_factory_v2(project_id=project.id) - task = await task_factory_v2(user_id=user.id, name="task") + project = await project_factory(user) + workflow = await workflow_factory(project_id=project.id) + task = await task_factory(user_id=user.id, name="task") await _workflow_insert_task( workflow_id=workflow.id, task_id=task.id, db=db ) - dataset = await dataset_factory_v2(project_id=project.id) + dataset = await dataset_factory(project_id=project.id) # Create a job in relationship with dataset and workflow - job = await job_factory_v2( + job = await job_factory( project_id=project.id, workflow_id=workflow.id, dataset_id=dataset.id, @@ -298,25 +298,25 @@ assert res.status_code == 404 # Assert that we cannot delete a dataset linked to a running job - ds_deletable = await
dataset_factory_v2(id=2, project_id=project.id) - ds_not_deletable = await dataset_factory_v2(id=3, project_id=project.id) + ds_deletable = await dataset_factory(id=2, project_id=project.id) + ds_not_deletable = await dataset_factory(id=3, project_id=project.id) common_args = { "project_id": project.id, "workflow_id": workflow.id, "working_dir": (tmp_path / "some_working_dir").as_posix(), } - j1 = await job_factory_v2( + j1 = await job_factory( dataset_id=ds_deletable.id, status=JobStatusType.DONE, **common_args, ) - j2 = await job_factory_v2( + j2 = await job_factory( dataset_id=ds_deletable.id, status=JobStatusType.FAILED, **common_args, ) - await job_factory_v2( + await job_factory( dataset_id=ds_not_deletable.id, status=JobStatusType.SUBMITTED, # reason why ds is not deletable **common_args, @@ -337,11 +337,11 @@ async def test_delete_dataset_cascade_jobs( async def test_patch_dataset( - app, client, MockCurrentUser, project_factory_v2, dataset_factory_v2 + app, client, MockCurrentUser, project_factory, dataset_factory ): async with MockCurrentUser() as user: - project = await project_factory_v2(user) - dataset = await dataset_factory_v2( + project = await project_factory(user) + dataset = await dataset_factory( project_id=project.id, ) project_id = project.id @@ -407,7 +407,7 @@ async def test_patch_dataset( async def test_dataset_import( client, MockCurrentUser, - project_factory_v2, + project_factory, db, ): ZARR_DIR = "/something" @@ -415,7 +415,7 @@ async def test_dataset_import( EXPECTED_ATTRIBUTE_FILTERS = dict(key1=["value1"]) async with MockCurrentUser() as user: - project = await project_factory_v2(user) + project = await project_factory(user) ENDPOINT_URL = f"{PREFIX}/project/{project.id}/dataset/import/" # FAILURE: Images with zarr_urls not relative to zarr_dir @@ -475,11 +475,11 @@ async def test_dataset_import( async def test_export_dataset( - client, MockCurrentUser, project_factory_v2, dataset_factory_v2 + client, MockCurrentUser, project_factory, dataset_factory ): async with MockCurrentUser() as user: - project = await project_factory_v2(user) - dataset = await dataset_factory_v2(project_id=project.id) + project = await project_factory(user) + dataset = await dataset_factory(project_id=project.id) res = await client.get( f"/api/v2/project/{project.id}/dataset/{dataset.id}/export/" ) diff --git a/tests/v2/test_03_api/test_api_dataset_images.py b/tests/v2/test_03_api/test_api_dataset_images.py index 8091af220a..9e96eae57a 100644 --- a/tests/v2/test_03_api/test_api_dataset_images.py +++ b/tests/v2/test_03_api/test_api_dataset_images.py @@ -48,15 +48,15 @@ def assert_expected_attributes_and_flags(res, tot_images: int): async def test_query_images( MockCurrentUser, client, - project_factory_v2, - dataset_factory_v2, + project_factory, + dataset_factory, ): N = 101 images = n_images(N) async with MockCurrentUser() as user: - project = await project_factory_v2(user) + project = await project_factory(user) - dataset = await dataset_factory_v2( + dataset = await dataset_factory( project_id=project.id, zarr_dir=ZARR_DIR, images=images ) @@ -214,19 +214,19 @@ async def test_query_images( async def test_delete_images( MockCurrentUser, client, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - task_factory_v2, - workflowtask_factory_v2, - job_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + task_factory, + workflowtask_factory, + job_factory, db, ): IMAGES = n_images(10) async with MockCurrentUser() as user: - project = await 
project_factory_v2(user) + project = await project_factory(user) - dataset = await dataset_factory_v2( + dataset = await dataset_factory( project_id=project.id, zarr_dir=ZARR_DIR, images=IMAGES ) res = await client.post( @@ -234,12 +234,12 @@ async def test_delete_images( ) assert res.json()["total_count"] == len(IMAGES) - workflow = await workflow_factory_v2(project_id=project.id) - task = await task_factory_v2(user_id=user.id) - wftask = await workflowtask_factory_v2( + workflow = await workflow_factory(project_id=project.id) + task = await task_factory(user_id=user.id) + wftask = await workflowtask_factory( workflow_id=workflow.id, task_id=task.id ) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, @@ -310,16 +310,16 @@ async def test_delete_images( async def test_post_new_image( MockCurrentUser, client, - project_factory_v2, - dataset_factory_v2, + project_factory, + dataset_factory, ): N = 10 images = n_images(N) async with MockCurrentUser() as user: - project = await project_factory_v2(user) + project = await project_factory(user) - dataset = await dataset_factory_v2( + dataset = await dataset_factory( project_id=project.id, zarr_dir=ZARR_DIR, images=images ) @@ -382,14 +382,14 @@ async def test_post_new_image( async def test_patch_images( MockCurrentUser, client, - project_factory_v2, - dataset_factory_v2, + project_factory, + dataset_factory, db, ): IMAGES = n_images(1) async with MockCurrentUser() as user: - project = await project_factory_v2(user) - dataset = await dataset_factory_v2(project_id=project.id, images=IMAGES) + project = await project_factory(user) + dataset = await dataset_factory(project_id=project.id, images=IMAGES) res = await client.patch( f"{PREFIX}/project/{project.id}/dataset/{dataset.id}/images/", diff --git a/tests/v2/test_03_api/test_api_history.py b/tests/v2/test_03_api/test_api_history.py index decc688097..af245636ef 100644 --- a/tests/v2/test_03_api/test_api_history.py +++ b/tests/v2/test_03_api/test_api_history.py @@ -7,9 +7,9 @@ async def test_get_workflow_tasks_statuses( - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, + project_factory, + dataset_factory, + workflow_factory, MockCurrentUser, client, db, @@ -35,11 +35,9 @@ async def test_get_workflow_tasks_statuses( user_kwargs={"is_verified": True, "profile_id": profile.id} ) as user: user_id = user.id - project = await project_factory_v2(user) - dataset = await dataset_factory_v2( - project_id=project.id, name="dataset1" - ) - workflow = await workflow_factory_v2( + project = await project_factory(user) + dataset = await dataset_factory(project_id=project.id, name="dataset1") + workflow = await workflow_factory( project_id=project.id, name="workflow" ) @@ -179,9 +177,9 @@ async def test_get_workflow_tasks_statuses( async def test_multiple_jobs_error( - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, + project_factory, + dataset_factory, + workflow_factory, MockCurrentUser, client, db, @@ -192,11 +190,9 @@ async def test_multiple_jobs_error( """ async with MockCurrentUser(user_kwargs={"is_verified": True}) as user: - project = await project_factory_v2(user) - dataset = await dataset_factory_v2( - project_id=project.id, name="dataset1" - ) - workflow = await workflow_factory_v2( + project = await project_factory(user) + dataset = await dataset_factory(project_id=project.id, name="dataset1") + workflow = await workflow_factory( project_id=project.id, name="workflow" ) diff --git 
a/tests/v2/test_03_api/test_api_job.py b/tests/v2/test_03_api/test_api_job.py index 5c0108bb9b..037db3bd27 100644 --- a/tests/v2/test_03_api/test_api_job.py +++ b/tests/v2/test_03_api/test_api_job.py @@ -42,10 +42,10 @@ async def test_submit_job_failures( db, client, MockCurrentUser, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - task_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + task_factory, local_resource_profile_db, slurm_sudo_resource_profile_db, ): @@ -57,26 +57,26 @@ async def test_submit_job_failures( profile_id=prof.id, ) ) as user: - task = await task_factory_v2(user_id=user.id) + task = await task_factory(user_id=user.id) # 1 - project1 = await project_factory_v2(user) - dataset1 = await dataset_factory_v2( + project1 = await project_factory(user) + dataset1 = await dataset_factory( project_id=project1.id, name="dataset1" ) - workflow1a = await workflow_factory_v2(project_id=project1.id) - workflow1b = await workflow_factory_v2(project_id=project1.id) + workflow1a = await workflow_factory(project_id=project1.id) + workflow1b = await workflow_factory(project_id=project1.id) await _workflow_insert_task( workflow_id=workflow1a.id, task_id=task.id, db=db ) # 2 - project2 = await project_factory_v2(user) - workflow2 = await workflow_factory_v2(project_id=project2.id) + project2 = await project_factory(user) + workflow2 = await workflow_factory(project_id=project2.id) # 3 - project3 = await project_factory_v2(user, resource_id=res2.id) - dataset3 = await dataset_factory_v2( + project3 = await project_factory(user, resource_id=res2.id) + dataset3 = await dataset_factory( project_id=project3.id, name="dataset3" ) - workflow3 = await workflow_factory_v2(project_id=project3.id) + workflow3 = await workflow_factory(project_id=project3.id) await _workflow_insert_task( workflow_id=workflow3.id, task_id=task.id, db=db ) @@ -135,10 +135,10 @@ async def test_submit_job_ssh_connection_failure( db, client, MockCurrentUser, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - task_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + task_factory, tmp777_path, slurm_ssh_resource_profile_fake_db, ): @@ -150,12 +150,12 @@ async def test_submit_job_ssh_connection_failure( profile_id=prof.id, ) ) as user: - project = await project_factory_v2(user) - dataset = await dataset_factory_v2( + project = await project_factory(user) + dataset = await dataset_factory( project_id=project.id, name="ds1", type="type1" ) - workflow = await workflow_factory_v2(project_id=project.id) - task = await task_factory_v2(user_id=user.id, name="1to2") + workflow = await workflow_factory(project_id=project.id) + task = await task_factory(user_id=user.id, name="1to2") await _workflow_insert_task( workflow_id=workflow.id, task_id=task.id, db=db ) @@ -173,10 +173,10 @@ async def test_submit_incompatible_filters( db, client, MockCurrentUser, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - task_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + task_factory, local_resource_profile_db, ): res, prof = local_resource_profile_db @@ -186,12 +186,12 @@ async def test_submit_incompatible_filters( profile_id=prof.id, ) ) as user: - task = await task_factory_v2(user_id=user.id, input_types={"a": True}) + task = await task_factory(user_id=user.id, input_types={"a": True}) - project = await project_factory_v2(user) - dataset = await dataset_factory_v2(project_id=project.id) + project = await project_factory(user) + 
dataset = await dataset_factory(project_id=project.id) - workflow1 = await workflow_factory_v2(project_id=project.id) + workflow1 = await workflow_factory(project_id=project.id) await _workflow_insert_task( db=db, workflow_id=workflow1.id, @@ -207,7 +207,7 @@ async def test_submit_incompatible_filters( assert res.status_code == 422 assert "filters" in res.json()["detail"] - workflow2 = await workflow_factory_v2(project_id=project.id) + workflow2 = await workflow_factory(project_id=project.id) await _workflow_insert_task( db=db, workflow_id=workflow2.id, @@ -225,11 +225,11 @@ async def test_submit_incompatible_filters( async def test_submit_jobs_with_same_dataset( db, client, - project_factory_v2, - job_factory_v2, - workflow_factory_v2, - dataset_factory_v2, - task_factory_v2, + project_factory, + job_factory, + workflow_factory, + dataset_factory, + task_factory, tmp_path, MockCurrentUser, local_resource_profile_db, @@ -245,28 +245,24 @@ async def test_submit_jobs_with_same_dataset( profile_id=prof.id, ) ) as user: - project = await project_factory_v2(user) - dataset1 = await dataset_factory_v2( - project_id=project.id, name="dataset1" - ) - dataset2 = await dataset_factory_v2( - project_id=project.id, name="dataset2" - ) - new_task = await task_factory_v2(user_id=user.id) - workflow = await workflow_factory_v2(project_id=project.id) + project = await project_factory(user) + dataset1 = await dataset_factory(project_id=project.id, name="dataset1") + dataset2 = await dataset_factory(project_id=project.id, name="dataset2") + new_task = await task_factory(user_id=user.id) + workflow = await workflow_factory(project_id=project.id) await _workflow_insert_task( workflow_id=workflow.id, task_id=new_task.id, db=db ) # Existing jobs with done/running status - await job_factory_v2( + await job_factory( project_id=project.id, dataset_id=dataset1.id, workflow_id=workflow.id, working_dir=tmp_path.as_posix(), status="done", ) - await job_factory_v2( + await job_factory( project_id=project.id, dataset_id=dataset2.id, workflow_id=workflow.id, @@ -305,10 +301,10 @@ async def test_project_apply_workflow_subset( db, client, MockCurrentUser, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - task_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + task_factory, local_resource_profile_db, ): res, prof = local_resource_profile_db @@ -318,18 +314,18 @@ async def test_project_apply_workflow_subset( profile_id=prof.id, ) ) as user: - project = await project_factory_v2(user) - dataset1 = await dataset_factory_v2( + project = await project_factory(user) + dataset1 = await dataset_factory( project_id=project.id, name="ds1", type="type1" ) - dataset2 = await dataset_factory_v2( + dataset2 = await dataset_factory( project_id=project.id, name="ds2", type="type2" ) - wf = await workflow_factory_v2(project_id=project.id) + wf = await workflow_factory(project_id=project.id) - task12 = await task_factory_v2(user_id=user.id, name="1to2") - task23 = await task_factory_v2(user_id=user.id, name="2to3") + task12 = await task_factory(user_id=user.id, name="1to2") + task23 = await task_factory(user_id=user.id, name="2to3") await _workflow_insert_task(workflow_id=wf.id, task_id=task12.id, db=db) await _workflow_insert_task(workflow_id=wf.id, task_id=task23.id, db=db) @@ -430,10 +426,10 @@ async def test_project_apply_workflow_subset( async def test_project_apply_slurm_account( MockCurrentUser, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - task_factory_v2, + 
project_factory, + dataset_factory, + workflow_factory, + task_factory, client, db, local_resource_profile_db, @@ -445,12 +441,12 @@ async def test_project_apply_slurm_account( profile_id=profile.id, ) ) as user: - project = await project_factory_v2(user) - dataset = await dataset_factory_v2( + project = await project_factory(user) + dataset = await dataset_factory( project_id=project.id, name="ds1", type="type1" ) - workflow = await workflow_factory_v2(project_id=project.id) - task = await task_factory_v2(user_id=user.id) + workflow = await workflow_factory(project_id=project.id) + task = await task_factory(user_id=user.id) await _workflow_insert_task( workflow_id=workflow.id, task_id=task.id, db=db ) @@ -483,12 +479,12 @@ async def test_project_apply_slurm_account( "slurm_accounts": SLURM_LIST, }, ) as user2: - project = await project_factory_v2(user2) - dataset = await dataset_factory_v2( + project = await project_factory(user2) + dataset = await dataset_factory( project_id=project.id, name="ds2", type="type2" ) - workflow = await workflow_factory_v2(project_id=project.id) - task = await task_factory_v2( + workflow = await workflow_factory(project_id=project.id) + task = await task_factory( user_id=user2.id, input_type="type2", output_type="type2", @@ -535,11 +531,11 @@ async def test_project_apply_slurm_account( async def test_get_jobs( db, client, - project_factory_v2, - job_factory_v2, - workflow_factory_v2, - dataset_factory_v2, - task_factory_v2, + project_factory, + job_factory, + workflow_factory, + dataset_factory, + task_factory, tmp_path, MockCurrentUser, local_resource_profile_db, @@ -555,13 +551,11 @@ async def test_get_jobs( profile_id=prof.id, ) ) as user: - project = await project_factory_v2(user) - dataset = await dataset_factory_v2( - project_id=project.id, name="dataset1" - ) - new_task = await task_factory_v2(user_id=user.id) - workflow1 = await workflow_factory_v2(project_id=project.id) - workflow2 = await workflow_factory_v2(project_id=project.id) + project = await project_factory(user) + dataset = await dataset_factory(project_id=project.id, name="dataset1") + new_task = await task_factory(user_id=user.id) + workflow1 = await workflow_factory(project_id=project.id) + workflow2 = await workflow_factory(project_id=project.id) await _workflow_insert_task( workflow_id=workflow1.id, task_id=new_task.id, db=db ) @@ -569,7 +563,7 @@ async def test_get_jobs( workflow_id=workflow2.id, task_id=new_task.id, db=db ) - await job_factory_v2( + await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow1.id, @@ -577,7 +571,7 @@ async def test_get_jobs( status="done", log="hello world", ) - job2 = await job_factory_v2( + job2 = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow2.id, @@ -644,10 +638,10 @@ async def test_get_jobs( async def test_get_jobs_access_control( db, client, - project_factory_v2, - workflow_factory_v2, - dataset_factory_v2, - task_factory_v2, + project_factory, + workflow_factory, + dataset_factory, + task_factory, MockCurrentUser, local_resource_profile_db, ): @@ -662,12 +656,10 @@ async def test_get_jobs_access_control( profile_id=prof.id, ) ) as user: - project = await project_factory_v2(user) - dataset = await dataset_factory_v2( - project_id=project.id, name="dataset" - ) - task = await task_factory_v2(user_id=user.id) - workflow = await workflow_factory_v2(project_id=project.id) + project = await project_factory(user) + dataset = await dataset_factory(project_id=project.id, name="dataset") + 
task = await task_factory(user_id=user.id) + workflow = await workflow_factory(project_id=project.id) await _workflow_insert_task( workflow_id=workflow.id, task_id=task.id, db=db ) @@ -727,23 +719,23 @@ async def test_stop_job( db, client, MockCurrentUser, - project_factory_v2, - job_factory_v2, - workflow_factory_v2, - dataset_factory_v2, - task_factory_v2, + project_factory, + job_factory, + workflow_factory, + dataset_factory, + task_factory, tmp_path, override_settings_factory, ): override_settings_factory(FRACTAL_RUNNER_BACKEND=backend) async with MockCurrentUser() as user: - project = await project_factory_v2(user) - wf = await workflow_factory_v2(project_id=project.id) - t = await task_factory_v2(user_id=user.id, name="task") - ds = await dataset_factory_v2(project_id=project.id) + project = await project_factory(user) + wf = await workflow_factory(project_id=project.id) + t = await task_factory(user_id=user.id, name="task") + ds = await dataset_factory(project_id=project.id) await _workflow_insert_task(workflow_id=wf.id, task_id=t.id, db=db) - job = await job_factory_v2( + job = await job_factory( working_dir=tmp_path.as_posix(), project_id=project.id, dataset_id=ds.id, @@ -769,10 +761,10 @@ async def test_update_timestamp_taskgroup( db, client, MockCurrentUser, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - task_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + task_factory, local_resource_profile_db, ): res, prof = local_resource_profile_db @@ -782,12 +774,10 @@ async def test_update_timestamp_taskgroup( profile_id=prof.id, ) ) as user: - project = await project_factory_v2(user) - dataset = await dataset_factory_v2( - project_id=project.id, name="dataset" - ) - workflow = await workflow_factory_v2(project_id=project.id) - task = await task_factory_v2(user_id=user.id) + project = await project_factory(user) + dataset = await dataset_factory(project_id=project.id, name="dataset") + workflow = await workflow_factory(project_id=project.id) + task = await task_factory(user_id=user.id) await _workflow_insert_task( workflow_id=workflow.id, task_id=task.id, db=db ) @@ -813,11 +803,11 @@ async def test_update_timestamp_taskgroup( async def test_get_latest_jobs( db, client, - project_factory_v2, - job_factory_v2, - workflow_factory_v2, - dataset_factory_v2, - task_factory_v2, + project_factory, + job_factory, + workflow_factory, + dataset_factory, + task_factory, tmp_path, MockCurrentUser, local_resource_profile_db, @@ -829,24 +819,22 @@ async def test_get_latest_jobs( profile_id=prof.id, ) ) as user: - project = await project_factory_v2(user) - dataset = await dataset_factory_v2( - project_id=project.id, name="dataset" - ) - task = await task_factory_v2(user_id=user.id) - workflow = await workflow_factory_v2(project_id=project.id) + project = await project_factory(user) + dataset = await dataset_factory(project_id=project.id, name="dataset") + task = await task_factory(user_id=user.id) + workflow = await workflow_factory(project_id=project.id) await _workflow_insert_task( workflow_id=workflow.id, task_id=task.id, db=db ) - await job_factory_v2( + await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, working_dir=tmp_path.as_posix(), status="done", ) - job2 = await job_factory_v2( + job2 = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, @@ -861,7 +849,7 @@ async def test_get_latest_jobs( assert res.status_code == 200 assert res.json()["id"] == job2.id - job3 = await 
job_factory_v2( + job3 = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, diff --git a/tests/v2/test_03_api/test_api_project.py b/tests/v2/test_03_api/test_api_project.py index 221a87b28e..4549168714 100644 --- a/tests/v2/test_03_api/test_api_project.py +++ b/tests/v2/test_03_api/test_api_project.py @@ -14,7 +14,7 @@ PREFIX = "/api/v2" -async def _project_list_v2(user: UserOAuth, db): +async def _project_list(user: UserOAuth, db): stm = ( select(ProjectV2) .join(LinkUserProjectV2, LinkUserProjectV2.project_id == ProjectV2.id) @@ -45,32 +45,32 @@ async def test_post_and_get_project( ) as userA: res = await client.post(f"{PREFIX}/project/", json=dict(name="project")) assert res.status_code == 201 - assert len(await _project_list_v2(userA, db)) == 1 + assert len(await _project_list(userA, db)) == 1 other_project = res.json() async with MockCurrentUser() as userB: res = await client.get(f"{PREFIX}/project/") assert res.status_code == 200 - assert res.json() == await _project_list_v2(userB, db) == [] + assert res.json() == await _project_list(userB, db) == [] res = await client.post(f"{PREFIX}/project/", json=dict(name="project")) assert res.status_code == 201 - assert len(await _project_list_v2(userB, db)) == 1 + assert len(await _project_list(userB, db)) == 1 # a user can't create two projectsV2 with the same name res = await client.post(f"{PREFIX}/project/", json=dict(name="project")) assert res.status_code == 422 - assert len(await _project_list_v2(userB, db)) == 1 + assert len(await _project_list(userB, db)) == 1 res = await client.get(f"{PREFIX}/project/") assert res.status_code == 200 assert len(res.json()) == 1 - assert res.json()[0]["id"] == (await _project_list_v2(userB, db))[0].id + assert res.json()[0]["id"] == (await _project_list(userB, db))[0].id project_id = res.json()[0]["id"] res = await client.get(f"{PREFIX}/project/{project_id}/") assert res.status_code == 200 - assert res.json()["id"] == (await _project_list_v2(userB, db))[0].id + assert res.json()["id"] == (await _project_list(userB, db))[0].id # fail on non existent project res = await client.get(f"{PREFIX}/project/123456/") @@ -214,10 +214,10 @@ async def test_delete_project( MockCurrentUser, db, tmp_path, - dataset_factory_v2, - workflow_factory_v2, - job_factory_v2, - task_factory_v2, + dataset_factory, + workflow_factory, + job_factory, + task_factory, local_resource_profile_db, ): resource, profile = local_resource_profile_db @@ -244,16 +244,16 @@ async def test_delete_project( project_id = res.json()[0]["id"] # Add a dataset to the project - dataset = await dataset_factory_v2(project_id=project_id) + dataset = await dataset_factory(project_id=project_id) dataset_id = dataset.id # Add a workflow to the project - wf = await workflow_factory_v2(project_id=p["id"]) - t = await task_factory_v2(user_id=user.id) + wf = await workflow_factory(project_id=p["id"]) + t = await task_factory(user_id=user.id) await _workflow_insert_task(workflow_id=wf.id, task_id=t.id, db=db) # Add a job to the project - await job_factory_v2( + await job_factory( project_id=p["id"], workflow_id=wf.id, working_dir=(tmp_path / "some_working_dir").as_posix(), @@ -307,24 +307,24 @@ async def test_delete_project_ongoing_jobs( MockCurrentUser, db, tmp_path, - project_factory_v2, - job_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - task_factory_v2, + project_factory, + job_factory, + dataset_factory, + workflow_factory, + task_factory, ): async with MockCurrentUser() as user: async def 
get_project_id_linked_to_job(status: JobStatusType) -> int: - p = await project_factory_v2(user) - d = await dataset_factory_v2(project_id=p.id) - w = await workflow_factory_v2(project_id=p.id) - t = await task_factory_v2( + p = await project_factory(user) + d = await dataset_factory(project_id=p.id) + w = await workflow_factory(project_id=p.id) + t = await task_factory( user_id=user.id, name=f"task_{status}", ) await _workflow_insert_task(workflow_id=w.id, task_id=t.id, db=db) - await job_factory_v2( + await job_factory( project_id=p.id, workflow_id=w.id, dataset_id=d.id, diff --git a/tests/v2/test_03_api/test_api_sharing.py b/tests/v2/test_03_api/test_api_sharing.py index 4d5e18ab47..a4bf1c8700 100644 --- a/tests/v2/test_03_api/test_api_sharing.py +++ b/tests/v2/test_03_api/test_api_sharing.py @@ -465,16 +465,16 @@ async def test_project_sharing_task_group_access( client, MockCurrentUser, local_resource_profile_db, - project_factory_v2, - workflow_factory_v2, - workflowtask_factory_v2, + project_factory, + workflow_factory, + workflowtask_factory, ): _, profile = local_resource_profile_db async with MockCurrentUser(user_kwargs={"profile_id": profile.id}) as user1: # User 1 creates a project and a workflow - project = await project_factory_v2(user1) - workflow = await workflow_factory_v2(project_id=project.id) + project = await project_factory(user1) + workflow = await workflow_factory(project_id=project.id) # User 1 inserts a non-private task res = await client.post( @@ -504,7 +504,7 @@ async def test_project_sharing_task_group_access( taskgroup = await db.get(TaskGroupV2, res.json()["taskgroupv2_id"]) assert taskgroup.user_group_id is None - await workflowtask_factory_v2( + await workflowtask_factory( workflow_id=workflow.id, task_id=task_id, ) @@ -580,7 +580,7 @@ async def test_project_sharing_subquery( client, MockCurrentUser, local_resource_profile_db, - project_factory_v2, + project_factory, ): """ See issue: """ _, profile = local_resource_profile_db user_kwargs = {"profile_id": profile.id} async with MockCurrentUser(user_kwargs=user_kwargs) as user1: # User 1 creates two projects - project1 = await project_factory_v2(user1) - await project_factory_v2(user1) + project1 = await project_factory(user1) + await project_factory(user1) async with MockCurrentUser(user_kwargs=user_kwargs) as user2: # User 1 shares Project 1 with User 2 diff --git a/tests/v2/test_03_api/test_api_task.py b/tests/v2/test_03_api/test_api_task.py index 773b92bca4..7ac732dea4 100644 --- a/tests/v2/test_03_api/test_api_task.py +++ b/tests/v2/test_03_api/test_api_task.py @@ -57,14 +57,14 @@ async def test_fail_wheel_file_and_version(client, testdata_path): async def test_task_get_list( - db, client, task_factory_v2, MockCurrentUser, user_group_factory + db, client, task_factory, MockCurrentUser, user_group_factory ): async with MockCurrentUser() as user: new_group = await user_group_factory( group_name="new_group", user_id=user.id ) - await task_factory_v2( + await task_factory( user_id=user.id, task_group_kwargs=dict(user_group_id=new_group.id), index=1, @@ -73,14 +73,14 @@ async def test_task_get_list( authors="Name1 Surname1,Name2 Surname2...", ) - await task_factory_v2( + await task_factory( user_id=user.id, index=2, category="Conversion", modality="EM", authors="NAME1 SURNAME3", ) - t = await task_factory_v2( + t = await task_factory( user_id=user.id, index=3, args_schema_non_parallel=dict(a=1), @@ -344,7 +344,7 @@ async def test_patch_task_auth( async def test_patch_task( - task_factory_v2, 
+ task_factory, MockCurrentUser, client, ): @@ -352,13 +352,13 @@ async def test_patch_task( user_kwargs=dict(is_superuser=True, is_verified=True) ) as user_A: user_A_id = user_A.id - task_parallel = await task_factory_v2( + task_parallel = await task_factory( user_id=user_A_id, index=1, type="parallel" ) - task_non_parallel = await task_factory_v2( + task_non_parallel = await task_factory( user_id=user_A_id, index=2, type="non_parallel" ) - task_compound = await task_factory_v2(user_id=user_A_id, index=3) + task_compound = await task_factory(user_id=user_A_id, index=3) # Test successful patch of task_compound update = TaskUpdate( input_types={"input": True, "output": False}, @@ -422,7 +422,7 @@ async def test_patch_task( async def test_get_task( - task_factory_v2, + task_factory, client, MockCurrentUser, local_resource_profile_db, @@ -432,12 +432,12 @@ async def test_get_task( resource2, profile2 = slurm_sudo_resource_profile_db async with MockCurrentUser(user_kwargs={"profile_id": profile.id}) as user1: - task1 = await task_factory_v2(user_id=user1.id, name="name1") + task1 = await task_factory(user_id=user1.id, name="name1") async with MockCurrentUser( user_kwargs={"profile_id": profile2.id} ) as user2: - task2 = await task_factory_v2(user_id=user2.id, name="name2") + task2 = await task_factory(user_id=user2.id, name="name2") res = await client.get(f"{PREFIX}/{task2.id}/") assert res.status_code == 200 res = await client.get(f"{PREFIX}/9999/") diff --git a/tests/v2/test_03_api/test_api_task_group.py b/tests/v2/test_03_api/test_api_task_group.py index d0ece63066..0769bef103 100644 --- a/tests/v2/test_03_api/test_api_task_group.py +++ b/tests/v2/test_03_api/test_api_task_group.py @@ -9,9 +9,7 @@ PREFIX = "/api/v2/task-group" -async def test_get_single_task_group( - client, MockCurrentUser, task_factory_v2, db -): +async def test_get_single_task_group(client, MockCurrentUser, task_factory, db): async with MockCurrentUser() as user1: # Create a new UserGroup with user1 new_group = UserGroup(name="new_group") @@ -23,7 +21,7 @@ async def test_get_single_task_group( await db.commit() await db.close() - task = await task_factory_v2( + task = await task_factory( user_id=user1.id, task_group_kwargs=dict(user_group_id=new_group.id), ) @@ -48,7 +46,7 @@ async def test_get_single_task_group( async def test_get_task_group_list( client, MockCurrentUser, - task_factory_v2, + task_factory, default_user_group, db, ): @@ -56,7 +54,7 @@ async def test_get_task_group_list( # of the `GET /api/v2/task-group/` response, because it has lower priority # than the same task group belonging to user2 async with MockCurrentUser() as user1: - task_by_user3 = await task_factory_v2( + task_by_user3 = await task_factory( user_id=user1.id, task_group_kwargs=dict( pkg_name="bbb", @@ -66,7 +64,7 @@ async def test_get_task_group_list( ) async with MockCurrentUser() as user2: - await task_factory_v2( + await task_factory( user_id=user2.id, args_schema_non_parallel={"foo": 0, "bar": 1}, args_schema_parallel={"xxx": 2, "yyy": 3}, @@ -76,7 +74,7 @@ async def test_get_task_group_list( user_group_id=None, ), ) - await task_factory_v2( + await task_factory( user_id=user2.id, task_group_kwargs=dict( active=False, pkg_name="aaa", version="1.2.3" ), args_schema_non_parallel={"foo": 4, "bar": 5}, args_schema_parallel={"xxx": 6, "yyy": 7}, ) - await task_factory_v2( + await task_factory( user_id=user2.id, task_group_kwargs=dict(active=False, pkg_name="bbb", version="xxx"), ) - await 
task_factory_v2( + await task_factory( user_id=user2.id, task_group_kwargs=dict(active=False, pkg_name="bbb", version="abc"), ) - await task_factory_v2( + await task_factory( user_id=user2.id, task_group_kwargs=dict( pkg_name="bbb", version=None, ), ) - await task_factory_v2( + await task_factory( user_id=user2.id, task_group_kwargs=dict( active=False, pkg_name="bbb", version="1.0.1" @@ -158,7 +156,7 @@ async def test_get_task_group_list( await db.commit() await db.close() - await task_factory_v2( + await task_factory( user_id=user3.id, task_group_kwargs=dict(user_group_id=new_group.id), ) @@ -184,7 +182,7 @@ async def test_get_task_group_list( async def test_patch_task_group( client, MockCurrentUser, - task_factory_v2, + task_factory, default_user_group, user_group_factory, ): @@ -192,13 +190,13 @@ async def test_patch_task_group( another_user_id = another_user.id async with MockCurrentUser(debug=True) as user1: - taskA = await task_factory_v2( + taskA = await task_factory( name="asd", user_id=user1.id, task_group_kwargs=dict(user_group_id=default_user_group.id), ) group2 = await user_group_factory("team2", user1.id, another_user_id) - taskB = await task_factory_v2( + taskB = await task_factory( name="asd", user_id=another_user_id, task_group_kwargs=dict(user_group_id=group2.id), @@ -282,12 +280,12 @@ async def test_get_task_group_activity_list( client, MockCurrentUser, db, - task_factory_v2, + task_factory, local_resource_profile_db, ): resource, profile = local_resource_profile_db async with MockCurrentUser(user_kwargs=dict(profile_id=profile.id)) as user: - task = await task_factory_v2( + task = await task_factory( user_id=user.id, task_group_kwargs=dict(resource_id=resource.id), ) diff --git a/tests/v2/test_03_api/test_api_verify_image_types.py b/tests/v2/test_03_api/test_api_verify_image_types.py index 3af3a0a315..4255eb78ef 100644 --- a/tests/v2/test_03_api/test_api_verify_image_types.py +++ b/tests/v2/test_03_api/test_api_verify_image_types.py @@ -9,8 +9,8 @@ async def test_verify_image_types( db, MockCurrentUser, - project_factory_v2, - dataset_factory_v2, + project_factory, + dataset_factory, client, ): ZARR_DIR = "/zarr_dir" @@ -60,9 +60,9 @@ async def test_verify_image_types( index += 1 async with MockCurrentUser() as user: - project = await project_factory_v2(user) + project = await project_factory(user) - dataset = await dataset_factory_v2( + dataset = await dataset_factory( project_id=project.id, zarr_dir=ZARR_DIR, images=images ) @@ -109,12 +109,12 @@ async def test_verify_image_types( async def test_check_non_processed_images( - project_factory_v2, - workflow_factory_v2, - task_factory_v2, - dataset_factory_v2, - workflowtask_factory_v2, - job_factory_v2, + project_factory, + workflow_factory, + task_factory, + dataset_factory, + workflowtask_factory, + job_factory, client, MockCurrentUser, tmp_path, @@ -126,47 +126,47 @@ async def test_check_non_processed_images( """ async with MockCurrentUser() as user: - task1 = await task_factory_v2( + task1 = await task_factory( user_id=user.id, name="a", ) - task2 = await task_factory_v2( + task2 = await task_factory( user_id=user.id, output_types={"x": True}, name="b", ) - task3 = await task_factory_v2( + task3 = await task_factory( name="c", user_id=user.id, type="converter_non_parallel", ) - project = await project_factory_v2(user) + project = await project_factory(user) - workflow = await workflow_factory_v2(project_id=project.id) - wft1 = await workflowtask_factory_v2( + workflow = await workflow_factory(project_id=project.id) + 
wft1 = await workflowtask_factory( workflow_id=workflow.id, task_id=task1.id, ) - wft2 = await workflowtask_factory_v2( + wft2 = await workflowtask_factory( workflow_id=workflow.id, task_id=task2.id, ) - wft3 = await workflowtask_factory_v2( + wft3 = await workflowtask_factory( workflow_id=workflow.id, task_id=task1.id, ) - await workflowtask_factory_v2( + await workflowtask_factory( workflow_id=workflow.id, task_id=task3.id, # converter task ) - wft5 = await workflowtask_factory_v2( + wft5 = await workflowtask_factory( workflow_id=workflow.id, task_id=task1.id, ) n = 10 - dataset = await dataset_factory_v2( + dataset = await dataset_factory( project_id=project.id, zarr_dir="/zarr_dir", images=[ @@ -182,7 +182,7 @@ async def test_check_non_processed_images( ).model_dump() ], ) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, diff --git a/tests/v2/test_03_api/test_api_workflow.py b/tests/v2/test_03_api/test_api_workflow.py index 6345c12669..b229e1d45a 100644 --- a/tests/v2/test_03_api/test_api_workflow.py +++ b/tests/v2/test_03_api/test_api_workflow.py @@ -40,7 +40,7 @@ async def add_task( return res.json() -async def test_post_workflow(db, client, MockCurrentUser, project_factory_v2): +async def test_post_workflow(db, client, MockCurrentUser, project_factory): async with MockCurrentUser() as user: project_id = None res = await client.post( @@ -54,8 +54,8 @@ async def test_post_workflow(db, client, MockCurrentUser, project_factory_v2): ) assert res.status_code == 404 # project does not exist - project1 = await project_factory_v2(user) - project2 = await project_factory_v2(user) + project1 = await project_factory(user) + project2 = await project_factory(user) workflow = dict(name="My Workflow") res = await client.post( @@ -81,11 +81,11 @@ async def test_post_workflow(db, client, MockCurrentUser, project_factory_v2): async def test_delete_workflow( - project_factory_v2, - workflow_factory_v2, - task_factory_v2, - dataset_factory_v2, - job_factory_v2, + project_factory, + workflow_factory, + task_factory, + dataset_factory, + job_factory, db, client, MockCurrentUser, @@ -98,7 +98,7 @@ async def test_delete_workflow( """ async with MockCurrentUser() as user: # Create project - project = await project_factory_v2(user) + project = await project_factory(user) p_id = project.id workflow = dict(name="My Workflow") @@ -109,7 +109,7 @@ async def test_delete_workflow( wf_id = res.json()["id"] # Create a task - task = await task_factory_v2(user_id=user.id, name="task1") + task = await task_factory(user_id=user.id, name="task1") # Add a dummy task to workflow res = await client.post( @@ -141,10 +141,10 @@ async def test_delete_workflow( assert len(res) == 0 # Assert you cannot delete a Workflow linked to an ongoing Job - wf_deletable_1 = await workflow_factory_v2(project_id=project.id) - wf_deletable_2 = await workflow_factory_v2(project_id=project.id) - wf_not_deletable_1 = await workflow_factory_v2(project_id=project.id) - task = await task_factory_v2(user_id=user.id, name="task2") + wf_deletable_1 = await workflow_factory(project_id=project.id) + wf_deletable_2 = await workflow_factory(project_id=project.id) + wf_not_deletable_1 = await workflow_factory(project_id=project.id) + task = await task_factory(user_id=user.id, name="task2") await _workflow_insert_task( workflow_id=wf_deletable_1.id, task_id=task.id, db=db ) @@ -156,23 +156,23 @@ async def test_delete_workflow( task_id=task.id, db=db, ) - dataset = await 
dataset_factory_v2(project_id=project.id) + dataset = await dataset_factory(project_id=project.id) payload = dict( project_id=project.id, dataset_id=dataset.id, working_dir=(tmp_path / "some_working_dir").as_posix(), ) - j1 = await job_factory_v2( + j1 = await job_factory( workflow_id=wf_deletable_1.id, status=JobStatusType.DONE, **payload, ) - j2 = await job_factory_v2( + j2 = await job_factory( workflow_id=wf_deletable_2.id, status=JobStatusType.FAILED, **payload, ) - await job_factory_v2( + await job_factory( workflow_id=wf_not_deletable_1.id, status=JobStatusType.SUBMITTED, **payload, @@ -200,9 +200,9 @@ async def test_delete_workflow( async def test_get_workflow( client, MockCurrentUser, - task_factory_v2, - project_factory_v2, - workflow_factory_v2, + task_factory, + project_factory, + workflow_factory, db, ): """ @@ -213,10 +213,10 @@ async def test_get_workflow( # Create several kinds of tasks async with MockCurrentUser() as user_A: user_A_id = user_A.id - t1 = await task_factory_v2(user_id=user_A_id, name="1") - t2 = await task_factory_v2(user_id=user_A_id, name="2") + t1 = await task_factory(user_id=user_A_id, name="1") + t2 = await task_factory(user_id=user_A_id, name="2") async with MockCurrentUser() as user_B: - t3 = await task_factory_v2(user_id=user_B.id, name="3") + t3 = await task_factory(user_id=user_B.id, name="3") tg3 = await db.get(TaskGroupV2, t3.taskgroupv2_id) tg2 = await db.get(TaskGroupV2, t2.taskgroupv2_id) tg3.user_group_id = None @@ -226,12 +226,12 @@ async def test_get_workflow( await db.commit() async with MockCurrentUser(user_kwargs=dict(id=user_A_id)) as user_A: - project = await project_factory_v2(user_A) + project = await project_factory(user_A) p_id = project.id # Create workflow WORKFLOW_NAME = "My Workflow" - wf = await workflow_factory_v2(project_id=p_id, name=WORKFLOW_NAME) + wf = await workflow_factory(project_id=p_id, name=WORKFLOW_NAME) wf_id = wf.id for task in [t1, t2, t3]: @@ -267,7 +267,7 @@ async def test_get_workflow( async def test_get_project_workflows( - db, client, MockCurrentUser, project_factory_v2 + db, client, MockCurrentUser, project_factory ): """ GIVEN a Project containing three Workflows @@ -276,8 +276,8 @@ async def test_get_project_workflows( THEN the list of all its Workflows is returned """ async with MockCurrentUser() as user: - project = await project_factory_v2(user) - other_project = await project_factory_v2(user) + project = await project_factory(user) + other_project = await project_factory(user) workflow1 = {"name": "WF1"} workflow2 = {"name": "WF2"} workflow3 = {"name": "WF3"} @@ -307,7 +307,7 @@ async def test_get_project_workflows( async def test_patch_workflow( - client, MockCurrentUser, project_factory_v2, db, task_factory_v2 + client, MockCurrentUser, project_factory, db, task_factory ): """ GIVEN a Workflow @@ -317,10 +317,10 @@ async def test_patch_workflow( # Create several kinds of tasks async with MockCurrentUser() as user_A: user_A_id = user_A.id - t1 = await task_factory_v2(user_id=user_A_id, name="1") - t2 = await task_factory_v2(user_id=user_A_id, name="2") + t1 = await task_factory(user_id=user_A_id, name="1") + t2 = await task_factory(user_id=user_A_id, name="2") async with MockCurrentUser() as user_B: - t3 = await task_factory_v2(user_id=user_B.id, name="3") + t3 = await task_factory(user_id=user_B.id, name="3") tg3 = await db.get(TaskGroupV2, t3.taskgroupv2_id) tg2 = await db.get(TaskGroupV2, t2.taskgroupv2_id) tg3.user_group_id = None @@ -330,7 +330,7 @@ async def test_patch_workflow( await 
db.commit() async with MockCurrentUser() as user: - project = await project_factory_v2(user) + project = await project_factory(user) # POST a Workflow with name `WF` res = await client.post( @@ -386,11 +386,11 @@ async def test_patch_workflow( async def test_delete_workflow_with_job( client, MockCurrentUser, - project_factory_v2, - job_factory_v2, - task_factory_v2, - workflow_factory_v2, - dataset_factory_v2, + project_factory, + job_factory, + task_factory, + workflow_factory, + dataset_factory, tmp_path, db, ): @@ -400,17 +400,17 @@ async def test_delete_workflow_with_job( THEN Job.workflow_id is set to None """ async with MockCurrentUser() as user: - project = await project_factory_v2(user) + project = await project_factory(user) # Create a workflow and a job in relationship with it - workflow = await workflow_factory_v2(project_id=project.id) - task = await task_factory_v2(user_id=user.id, name="1") + workflow = await workflow_factory(project_id=project.id) + task = await task_factory(user_id=user.id, name="1") await _workflow_insert_task( workflow_id=workflow.id, task_id=task.id, db=db ) - dataset = await dataset_factory_v2(project_id=project.id) + dataset = await dataset_factory(project_id=project.id) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, workflow_id=workflow.id, dataset_id=dataset.id, @@ -432,14 +432,14 @@ async def test_delete_workflow_with_job( async def test_workflow_type_filters_flow( client, MockCurrentUser, - task_factory_v2, - project_factory_v2, - workflow_factory_v2, + task_factory, + project_factory, + workflow_factory, db, ): async with MockCurrentUser() as user: - proj = await project_factory_v2(user) - wf = await workflow_factory_v2(project_id=proj.id) + proj = await project_factory(user) + wf = await workflow_factory(project_id=proj.id) # FAILURE due to empty workflow res = await client.get( @@ -449,11 +449,9 @@ async def test_workflow_type_filters_flow( assert "Workflow has no tasks" in str(res.json()) # Add a workflow task - task_converter = await task_factory_v2( - user_id=user.id, name="converter" - ) - task_cellpose = await task_factory_v2(user_id=user.id, name="cellpose") - task_MIP = await task_factory_v2( + task_converter = await task_factory(user_id=user.id, name="converter") + task_cellpose = await task_factory(user_id=user.id, name="cellpose") + task_MIP = await task_factory( user_id=user.id, name="mip", input_types={"is_3D": True}, diff --git a/tests/v2/test_03_api/test_api_workflow_import_export.py b/tests/v2/test_03_api/test_api_workflow_import_export.py index 33d2383e18..04af98ae57 100644 --- a/tests/v2/test_03_api/test_api_workflow_import_export.py +++ b/tests/v2/test_03_api/test_api_workflow_import_export.py @@ -14,8 +14,8 @@ async def test_import_export( client, MockCurrentUser, - task_factory_v2, - project_factory_v2, + task_factory, + project_factory, testdata_path, db, ): @@ -33,11 +33,11 @@ def wf_modify(task_import: dict, new_name: str | None = None): wf_file_task_source_1 = workflow_from_file["task_list"][1]["task"]["source"] async with MockCurrentUser() as user: - prj = await project_factory_v2(user) - task_with_source0 = await task_factory_v2( + prj = await project_factory(user) + task_with_source0 = await task_factory( user_id=user.id, source=wf_file_task_source_0, name="0" ) - task_with_source1 = await task_factory_v2( + task_with_source1 = await task_factory( user_id=user.id, source=wf_file_task_source_1, name="1" ) @@ -143,7 +143,7 @@ def wf_modify(task_import: dict, new_name: str | None = None): 
error_msg = "Could not find a task matching with source='INVALID'." assert error_msg in res.json()["detail"] - first_task_no_source = await task_factory_v2( + first_task_no_source = await task_factory( user_id=user.id, name="cellpose_segmentation", task_group_kwargs=dict( @@ -200,7 +200,7 @@ def wf_modify(task_import: dict, new_name: str | None = None): await db.commit() await db.close() - await task_factory_v2( + await task_factory( user_id=user.id, name="cellpose_segmentation", task_group_kwargs=dict( @@ -371,7 +371,7 @@ async def test_unit_get_task_by_taskimport(): async def test_unit_disambiguate_task_groups( MockCurrentUser, - task_factory_v2, + task_factory, db, default_user_group, ): @@ -406,7 +406,7 @@ async def test_unit_disambiguate_task_groups( db.add(LinkUserGroup(user_id=user3_id, group_id=new_group.id)) await db.commit() - task_A = await task_factory_v2( + task_A = await task_factory( name="taskA", user_id=user1_id, task_group_kwargs=dict( @@ -416,7 +416,7 @@ async def test_unit_disambiguate_task_groups( ), ) - task_B = await task_factory_v2( + task_B = await task_factory( name="taskB", user_id=user2_id, task_group_kwargs=dict( @@ -426,7 +426,7 @@ async def test_unit_disambiguate_task_groups( ), ) - task_C = await task_factory_v2( + task_C = await task_factory( name="taskC", user_id=user3_id, task_group_kwargs=dict( @@ -491,13 +491,13 @@ async def test_unit_disambiguate_task_groups( async def test_import_with_legacy_filters( client, MockCurrentUser, - task_factory_v2, - project_factory_v2, + task_factory, + project_factory, ): async with MockCurrentUser() as user: - prj = await project_factory_v2(user) + prj = await project_factory(user) ENDPOINT_URL = f"{PREFIX}/project/{prj.id}/workflow/import/" - task = await task_factory_v2( + task = await task_factory( name="mytask", version="myversion", user_id=user.id, @@ -614,13 +614,13 @@ async def test_import_with_legacy_filters( async def test_import_filters_compatibility( MockCurrentUser, - project_factory_v2, - task_factory_v2, + project_factory, + task_factory, client, ): async with MockCurrentUser() as user: - prj = await project_factory_v2(user) - await task_factory_v2( + prj = await project_factory(user) + await task_factory( user_id=user.id, source="foo", input_types={"a": True, "b": False}, @@ -653,8 +653,8 @@ async def test_import_filters_compatibility( async def test_import_multiple_task_groups_same_version( client, MockCurrentUser, - task_factory_v2, - project_factory_v2, + task_factory, + project_factory, db, ): """ @@ -675,7 +675,7 @@ async def test_import_multiple_task_groups_same_version( await db.commit() await db.refresh(some_usergroup) - await task_factory_v2( + await task_factory( user_id=user2.id, name=TASK_NAME, task_group_kwargs=dict( @@ -686,13 +686,13 @@ async def test_import_multiple_task_groups_same_version( ) async with MockCurrentUser() as user1: - proj = await project_factory_v2(user1) + proj = await project_factory(user1) db.add(LinkUserGroup(user_id=user1.id, group_id=some_usergroup.id)) db.add(LinkUserGroup(user_id=user2_id, group_id=some_usergroup.id)) await db.commit() - await task_factory_v2( + await task_factory( user_id=user1.id, name=TASK_NAME, task_group_kwargs=dict( @@ -701,7 +701,7 @@ async def test_import_multiple_task_groups_same_version( ), version=V1, ) - await task_factory_v2( + await task_factory( user_id=user1.id, name=TASK_NAME, task_group_kwargs=dict( diff --git a/tests/v2/test_03_api/test_api_workflow_task.py b/tests/v2/test_03_api/test_api_workflow_task.py index 
916f581bb1..81f17c3673 100644 --- a/tests/v2/test_03_api/test_api_workflow_task.py +++ b/tests/v2/test_03_api/test_api_workflow_task.py @@ -43,9 +43,9 @@ async def post_task( async def test_post_worfkflow_task( client, MockCurrentUser, - project_factory_v2, - workflow_factory_v2, - task_factory_v2, + project_factory, + workflow_factory, + task_factory, local_resource_profile_db, ): """ @@ -57,8 +57,8 @@ async def test_post_worfkflow_task( resource, profile = local_resource_profile_db async with MockCurrentUser(user_kwargs=dict(profile_id=profile.id)) as user: # Create project and workflow - proj = await project_factory_v2(user) - wf = await workflow_factory_v2(project_id=proj.id) + proj = await project_factory(user) + wf = await workflow_factory(project_id=proj.id) wf_id = wf.id # Test that adding an invalid task fails with 404 @@ -104,7 +104,7 @@ async def test_post_worfkflow_task( assert task_list[2]["args_non_parallel"] == args_payload # Test type filters compatibility - task = await task_factory_v2(user_id=user.id, input_types={"a": False}) + task = await task_factory(user_id=user.id, input_types={"a": False}) res = await client.post( f"{PREFIX}/project/{proj.id}/workflow/{wf_id}/wftask/" f"?task_id={task.id}", @@ -123,9 +123,9 @@ async def test_post_worfkflow_task( async def test_post_worfkflow_task_failures( client, MockCurrentUser, - project_factory_v2, - workflow_factory_v2, - task_factory_v2, + project_factory, + workflow_factory, + task_factory, db, local_resource_profile_db, ): @@ -140,11 +140,11 @@ async def test_post_worfkflow_task_failures( user_kwargs=dict(profile_id=profile.id) ) as user_A: user_A_id = user_A.id - task_A_active = await task_factory_v2( + task_A_active = await task_factory( name="a-active", user_id=user_A_id, ) - task_A_non_active = await task_factory_v2( + task_A_non_active = await task_factory( name="a-non-active", user_id=user_A_id, task_group_kwargs=dict(active=False), @@ -163,7 +163,7 @@ async def test_post_worfkflow_task_failures( await db.close() user_B_id = user_B.id - task_B = await task_factory_v2( + task_B = await task_factory( name="a", user_id=user_B_id, task_group_kwargs=dict(user_group_id=new_group.id), @@ -171,8 +171,8 @@ async def test_post_worfkflow_task_failures( async with MockCurrentUser(user_kwargs=dict(id=user_A_id)) as user: # Create project and workflow - proj = await project_factory_v2(user) - wf = await workflow_factory_v2(project_id=proj.id) + proj = await project_factory(user) + wf = await workflow_factory(project_id=proj.id) wf_id = wf.id endpoint_path = f"{PREFIX}/project/{proj.id}/workflow/{wf_id}/wftask/" @@ -232,7 +232,7 @@ async def test_delete_workflow_task( db, client, MockCurrentUser, - project_factory_v2, + project_factory, local_resource_profile_db, ): """ @@ -244,7 +244,7 @@ async def test_delete_workflow_task( """ resource, profile = local_resource_profile_db async with MockCurrentUser(user_kwargs=dict(profile_id=profile.id)) as user: - project = await project_factory_v2(user) + project = await project_factory(user) res = await client.post( f"{PREFIX}/project/{project.id}/workflow/", json=dict(name="My Workflow"), @@ -321,8 +321,8 @@ async def test_delete_workflow_task( async def test_patch_workflow_task( client, MockCurrentUser, - project_factory_v2, - task_factory_v2, + project_factory, + task_factory, local_resource_profile_db, ): """ @@ -332,7 +332,7 @@ async def test_patch_workflow_task( """ resource, profile = local_resource_profile_db async with MockCurrentUser(user_kwargs=dict(profile_id=profile.id)) as user: 
- project = await project_factory_v2(user) + project = await project_factory(user) workflow = {"name": "WF"} res = await client.post( f"{PREFIX}/project/{project.id}/workflow/", json=workflow @@ -487,7 +487,7 @@ async def test_patch_workflow_task( ) assert res.status_code == 201 wf_id = res.json()["id"] - task = await task_factory_v2(user_id=user.id, input_types={"a": False}) + task = await task_factory(user_id=user.id, input_types={"a": False}) res = await client.post( f"{PREFIX}/project/{project.id}/workflow/{wf_id}/wftask/" f"?task_id={task.id}", @@ -510,7 +510,7 @@ async def test_patch_workflow_task( async def test_patch_workflow_task_with_args_schema( - client, MockCurrentUser, project_factory_v2, task_factory_v2 + client, MockCurrentUser, project_factory, task_factory ): """ GIVEN a Task with args_schema and a WorkflowTask @@ -533,14 +533,14 @@ class _Arguments(BaseModel): async with MockCurrentUser() as user: # Create DB objects - project = await project_factory_v2(user) + project = await project_factory(user) workflow = {"name": "WF"} res = await client.post( f"{PREFIX}/project/{project.id}/workflow/", json=workflow ) assert res.status_code == 201 wf_id = res.json()["id"] - task = await task_factory_v2( + task = await task_factory( user_id=user.id, name="task with schema", command_non_parallel="cmd", @@ -591,7 +591,7 @@ class _Arguments(BaseModel): async def test_patch_workflow_task_failures( client, MockCurrentUser, - project_factory_v2, + project_factory, local_resource_profile_db, ): """ @@ -603,7 +603,7 @@ async def test_patch_workflow_task_failures( resource, profile = local_resource_profile_db async with MockCurrentUser(user_kwargs=dict(profile_id=profile.id)) as user: # Prepare two workflows, with one task each - project = await project_factory_v2(user) + project = await project_factory(user) workflow1 = {"name": "WF1"} res = await client.post( f"{PREFIX}/project/{project.id}/workflow/", json=workflow1 @@ -675,7 +675,7 @@ async def test_patch_workflow_task_failures( async def test_reorder_task_list( - project_factory_v2, + project_factory, client, MockCurrentUser, local_resource_profile_db, @@ -696,7 +696,7 @@ async def test_reorder_task_list( resource, profile = local_resource_profile_db async with MockCurrentUser(user_kwargs=dict(profile_id=profile.id)) as user: # Create a main project and a pool of available tasks - project = await project_factory_v2(user) + project = await project_factory(user) tasks = [(await post_task(client, f"task-{ind}")) for ind in range(5)] for ind_perm, permutation in enumerate(reorder_cases): @@ -767,7 +767,7 @@ async def test_reorder_task_list( async def test_reorder_task_list_fail( client, MockCurrentUser, - project_factory_v2, + project_factory, db, local_resource_profile_db, ): @@ -781,7 +781,7 @@ async def test_reorder_task_list_fail( resource, profile = local_resource_profile_db async with MockCurrentUser(user_kwargs=dict(profile_id=profile.id)) as user: # Create project, workflow, tasks, workflowtasks - project = await project_factory_v2(user) + project = await project_factory(user) res = await client.post( f"{PREFIX}/project/{project.id}/workflow/", json=dict(name="WF") ) @@ -871,13 +871,13 @@ async def test_reorder_task_list_fail( async def test_read_workflowtask( MockCurrentUser, - project_factory_v2, + project_factory, client, local_resource_profile_db, ): resource, profile = local_resource_profile_db async with MockCurrentUser(user_kwargs=dict(profile_id=profile.id)) as user: - project = await project_factory_v2(user) + project = 
await project_factory(user) res = await client.post( f"{PREFIX}/project/{project.id}/workflow/", json=dict(name="My Workflow"), @@ -901,47 +901,45 @@ async def test_read_workflowtask( async def test_replace_task_in_workflowtask( - project_factory_v2, - workflow_factory_v2, - task_factory_v2, - workflowtask_factory_v2, + project_factory, + workflow_factory, + task_factory, + workflowtask_factory, client, MockCurrentUser, db, ): async with MockCurrentUser() as user: - project = await project_factory_v2(user) - workflow = await workflow_factory_v2(project_id=project.id) + project = await project_factory(user) + workflow = await workflow_factory(project_id=project.id) assert workflow.task_list == [] - task1 = await task_factory_v2(name="1", user_id=user.id) - task2 = await task_factory_v2( - name="2", user_id=user.id, type="parallel" - ) - task3 = await task_factory_v2(name="3", user_id=user.id) - task4 = await task_factory_v2( + task1 = await task_factory(name="1", user_id=user.id) + task2 = await task_factory(name="2", user_id=user.id, type="parallel") + task3 = await task_factory(name="3", user_id=user.id) + task4 = await task_factory( name="4", user_id=user.id, type="non_parallel" ) - wft1 = await workflowtask_factory_v2( + wft1 = await workflowtask_factory( workflow_id=workflow.id, task_id=task1.id, args_parallel={"wft1": "wft1"}, args_non_parallel={"wft1": "wft1"}, type_filters={"a": True}, ) - wft2 = await workflowtask_factory_v2( + wft2 = await workflowtask_factory( workflow_id=workflow.id, task_id=task2.id, args_parallel={"wft2": "wft2"}, ) - wft3 = await workflowtask_factory_v2( + wft3 = await workflowtask_factory( workflow_id=workflow.id, task_id=task3.id, args_parallel={"wft3": "wft3"}, args_non_parallel={"wft3": "wft3"}, ) - wft4 = await workflowtask_factory_v2( + wft4 = await workflowtask_factory( workflow_id=workflow.id, task_id=task4.id, args_non_parallel={"wft4": "wft4"}, @@ -955,7 +953,7 @@ async def test_replace_task_in_workflowtask( wft4.id, ] - task5 = await task_factory_v2( + task5 = await task_factory( name="5", user_id=user.id, type="compound", @@ -1065,8 +1063,8 @@ async def test_replace_task_in_workflowtask( debug(res.json()) # Test type filters compatibility - task6 = await task_factory_v2(user_id=user.id, input_types={"a": False}) - task7 = await task_factory_v2( + task6 = await task_factory(user_id=user.id, input_types={"a": False}) + task7 = await task_factory( user_id=user.id, input_types={"a": True}, name="7", diff --git a/tests/v2/test_03_api/test_get_task_group_read_access.py b/tests/v2/test_03_api/test_get_task_group_read_access.py index db0ddacc0d..b2349a377f 100644 --- a/tests/v2/test_03_api/test_get_task_group_read_access.py +++ b/tests/v2/test_03_api/test_get_task_group_read_access.py @@ -12,10 +12,10 @@ async def test_get_task_group_read_access( MockCurrentUser, - task_factory_v2, - project_factory_v2, - workflow_factory_v2, - dataset_factory_v2, + task_factory, + project_factory, + workflow_factory, + dataset_factory, db, client, local_resource_profile_db, diff --git a/tests/v2/test_03_api/test_loss_of_access_to_task.py b/tests/v2/test_03_api/test_loss_of_access_to_task.py index ff4f5eff4f..84213edbcf 100644 --- a/tests/v2/test_03_api/test_loss_of_access_to_task.py +++ b/tests/v2/test_03_api/test_loss_of_access_to_task.py @@ -8,10 +8,10 @@ async def test_loss_of_access_to_task( MockCurrentUser, - task_factory_v2, - project_factory_v2, - workflow_factory_v2, - dataset_factory_v2, + task_factory, + project_factory, + workflow_factory, + dataset_factory, 
db, client, local_resource_profile_db, @@ -48,12 +48,12 @@ async def test_loss_of_access_to_task( await db.commit() # Create tasks with different ownership info - task_A = await task_factory_v2( + task_A = await task_factory( command_non_parallel="echo", user_id=user_A.id, name=f"iteration-{i}-A", ) - task_B = await task_factory_v2( + task_B = await task_factory( command_non_parallel="echo", user_id=user_B.id, task_group_kwargs=dict(user_group_id=team_group.id), @@ -61,13 +61,13 @@ async def test_loss_of_access_to_task( ) async with MockCurrentUser(user_kwargs=dict(id=user_A.id)) as user: # Prepare all objects - project = await project_factory_v2(user) - dataset = await dataset_factory_v2( + project = await project_factory(user) + dataset = await dataset_factory( project_id=project.id, zarr_dir="/fake/", images=[dict(zarr_url="/fake/1")], ) - workflow = await workflow_factory_v2(project_id=project.id) + workflow = await workflow_factory(project_id=project.id) await _workflow_insert_task( workflow_id=workflow.id, task_id=task_A.id, db=db ) diff --git a/tests/v2/test_03_api/test_status_legacy.py b/tests/v2/test_03_api/test_status_legacy.py index 4331a038e4..999d058537 100644 --- a/tests/v2/test_03_api/test_status_legacy.py +++ b/tests/v2/test_03_api/test_status_legacy.py @@ -9,21 +9,21 @@ async def test_status_legacy( MockCurrentUser, db, client, - task_factory_v2, - project_factory_v2, - workflow_factory_v2, - dataset_factory_v2, - job_factory_v2, + task_factory, + project_factory, + workflow_factory, + dataset_factory, + job_factory, ): async with MockCurrentUser() as user: - task1 = await task_factory_v2( + task1 = await task_factory( user_id=user.id, name="task1", command_non_parallel="echo" ) - task2 = await task_factory_v2( + task2 = await task_factory( user_id=user.id, name="task2", command_non_parallel="echo" ) - project = await project_factory_v2(user) - workflow = await workflow_factory_v2(project_id=project.id) + project = await project_factory(user) + workflow = await workflow_factory(project_id=project.id) wftask1 = await _workflow_insert_task( workflow_id=workflow.id, task_id=task1.id, db=db ) @@ -31,7 +31,7 @@ async def test_status_legacy( workflow_id=workflow.id, task_id=task2.id, db=db ) - dataset1 = await dataset_factory_v2( + dataset1 = await dataset_factory( project_id=project.id, name="ds1", ) @@ -42,7 +42,7 @@ async def test_status_legacy( assert res.status_code == 200 assert res.json() == {"status": {}} - dataset2 = await dataset_factory_v2( + dataset2 = await dataset_factory( project_id=project.id, name="ds2", history=[ @@ -77,7 +77,7 @@ async def test_status_legacy( flag_modified(dataset2, "history") await db.commit() - await job_factory_v2( + await job_factory( project_id=project.id, workflow_id=workflow.id, dataset_id=dataset2.id, @@ -97,7 +97,7 @@ async def test_status_legacy( } } - await job_factory_v2( + await job_factory( project_id=project.id, workflow_id=workflow.id, dataset_id=dataset2.id, diff --git a/tests/v2/test_03_api/test_submission_job_list_v2.py b/tests/v2/test_03_api/test_submission_job_list_v2.py index 868b83c806..43b12cf7f6 100644 --- a/tests/v2/test_03_api/test_submission_job_list_v2.py +++ b/tests/v2/test_03_api/test_submission_job_list_v2.py @@ -1,21 +1,19 @@ from fractal_server.app.routes.api.v2._aux_functions import ( _workflow_insert_task, ) -from fractal_server.app.routes.api.v2._aux_functions import ( - clean_app_job_list_v2, -) +from fractal_server.app.routes.api.v2._aux_functions import clean_app_job_list -async def 
test_clean_app_job_list_v2( +async def test_clean_app_job_list( MockCurrentUser, db, app, client, - task_factory_v2, - project_factory_v2, - workflow_factory_v2, - dataset_factory_v2, - job_factory_v2, + task_factory, + project_factory, + workflow_factory, + dataset_factory, + job_factory, override_settings_factory, local_resource_profile_db, ): @@ -34,19 +32,19 @@ async def test_clean_app_job_list_v2( ) ) as user: # Create DB objects - task = await task_factory_v2( + task = await task_factory( user_id=user.id, name="task", command_non_parallel="echo" ) - project = await project_factory_v2(user) - workflow = await workflow_factory_v2(project_id=project.id) + project = await project_factory(user) + workflow = await workflow_factory(project_id=project.id) await _workflow_insert_task( workflow_id=workflow.id, task_id=task.id, db=db ) - dataset1 = await dataset_factory_v2(project_id=project.id, name="ds-1") - dataset2 = await dataset_factory_v2(project_id=project.id, name="ds-2") + dataset1 = await dataset_factory(project_id=project.id, name="ds-1") + dataset2 = await dataset_factory(project_id=project.id, name="ds-2") # Create job with submitted status - job1 = await job_factory_v2( + job1 = await job_factory( project_id=project.id, workflow_id=workflow.id, dataset_id=dataset1.id, @@ -69,5 +67,5 @@ async def test_clean_app_job_list_v2( assert app.state.jobsV2 == [job1_id, job2_id] # After clean-up, only the submitted job is left - jobs_list = await clean_app_job_list_v2(db, app.state.jobsV2) + jobs_list = await clean_app_job_list(db, app.state.jobsV2) assert jobs_list == [job1_id] diff --git a/tests/v2/test_03_api/test_task_version_update.py b/tests/v2/test_03_api/test_task_version_update.py index 2e7f69d196..1961c9270d 100644 --- a/tests/v2/test_03_api/test_task_version_update.py +++ b/tests/v2/test_03_api/test_task_version_update.py @@ -3,40 +3,40 @@ async def test_get_workflow_version_update_candidates( MockCurrentUser, - project_factory_v2, - workflow_factory_v2, - workflowtask_factory_v2, - task_factory_v2, + project_factory, + workflow_factory, + workflowtask_factory, + task_factory, client, db, ): async with MockCurrentUser() as user: - project = await project_factory_v2(user) - workflow = await workflow_factory_v2(project_id=project.id) + project = await project_factory(user) + workflow = await workflow_factory(project_id=project.id) # Matching tasks (0, 1, 2) - task0 = await task_factory_v2( + task0 = await task_factory( user_id=user.id, version="1", task_group_kwargs={"pkg_name": "my_pkg", "version": "1"}, name="my_task", args_schema_parallel={"foo": "bar"}, ) - task1 = await task_factory_v2( + task1 = await task_factory( user_id=user.id, version="2", task_group_kwargs={"pkg_name": "my_pkg", "version": "2"}, name="my_task", args_schema_parallel={"foo": "bar"}, ) - task2 = await task_factory_v2( + task2 = await task_factory( user_id=user.id, version="3", task_group_kwargs={"pkg_name": "my_pkg", "version": "3"}, name="my_task", args_schema_parallel={"foo": "bar"}, ) - task3 = await task_factory_v2( + task3 = await task_factory( user_id=user.id, version="4", task_group_kwargs={"pkg_name": "my_pkg", "version": "4"}, @@ -44,14 +44,14 @@ async def test_get_workflow_version_update_candidates( args_schema_parallel={"foo": "bar"}, ) # Task with no args schemas - task4 = await task_factory_v2( + task4 = await task_factory( user_id=user.id, version="5", task_group_kwargs={"pkg_name": "my_pkg", "version": "5"}, name="my_task", ) # Task with non-parsable version - task5 = await task_factory_v2( 
+ task5 = await task_factory( user_id=user.id, name="my_task", args_schema_parallel={"foo": "bar"}, @@ -62,7 +62,7 @@ async def test_get_workflow_version_update_candidates( }, ) # Task with non-matching pkg_name - task6 = await task_factory_v2( + task6 = await task_factory( user_id=user.id, version="6", task_group_kwargs={"pkg_name": "another-one", "version": "6"}, @@ -71,7 +71,7 @@ async def test_get_workflow_version_update_candidates( args_schema_parallel={"foo": "bar"}, ) # Task with non-compatible type - task7 = await task_factory_v2( + task7 = await task_factory( user_id=user.id, version="7", task_group_kwargs={"pkg_name": "my_pkg", "version": "6"}, @@ -81,7 +81,7 @@ async def test_get_workflow_version_update_candidates( ) assert task0.type != task7.type # Non-active task - task6 = await task_factory_v2( + task6 = await task_factory( user_id=user.id, version="8", task_group_kwargs={ @@ -103,9 +103,7 @@ async def test_get_workflow_version_update_candidates( task6, task7, ]: - await workflowtask_factory_v2( - workflow_id=workflow.id, task_id=task.id - ) + await workflowtask_factory(workflow_id=workflow.id, task_id=task.id) await db.refresh(workflow) diff --git a/tests/v2/test_03_api/test_unit_aux_functions.py b/tests/v2/test_03_api/test_unit_aux_functions.py index 69a3be9a9d..64dfdce66f 100644 --- a/tests/v2/test_03_api/test_unit_aux_functions.py +++ b/tests/v2/test_03_api/test_unit_aux_functions.py @@ -45,14 +45,14 @@ async def test_404_functions(db): async def test_get_project_check_access( MockCurrentUser, - project_factory_v2, + project_factory, db, ): async with MockCurrentUser() as other_user: - other_project = await project_factory_v2(other_user) + other_project = await project_factory(other_user) async with MockCurrentUser() as user: - project = await project_factory_v2(user) + project = await project_factory(user) # Test success await _get_project_check_access( @@ -153,17 +153,17 @@ async def test_get_project_check_access( async def test_get_workflow_check_access( MockCurrentUser, - project_factory_v2, - workflow_factory_v2, + project_factory, + workflow_factory, db, ): async with MockCurrentUser() as other_user: - other_project = await project_factory_v2(other_user) - other_workflow = await workflow_factory_v2(project_id=other_project.id) + other_project = await project_factory(other_user) + other_workflow = await workflow_factory(project_id=other_project.id) async with MockCurrentUser() as user: - project = await project_factory_v2(user) - workflow = await workflow_factory_v2(project_id=project.id) + project = await project_factory(user) + workflow = await workflow_factory(project_id=project.id) # Test success await _get_workflow_check_access( @@ -202,22 +202,22 @@ async def test_get_workflow_check_access( async def test_get_workflow_task_check_access( MockCurrentUser, - project_factory_v2, - workflow_factory_v2, - task_factory_v2, - workflowtask_factory_v2, + project_factory, + workflow_factory, + task_factory, + workflowtask_factory, db, ): async with MockCurrentUser() as user: - project = await project_factory_v2(user) - workflow = await workflow_factory_v2(project_id=project.id) - task = await task_factory_v2(user_id=user.id, name="a") - wftask = await workflowtask_factory_v2( + project = await project_factory(user) + workflow = await workflow_factory(project_id=project.id) + task = await task_factory(user_id=user.id, name="a") + wftask = await workflowtask_factory( workflow_id=workflow.id, task_id=task.id ) - other_workflow = await 
workflow_factory_v2(project_id=project.id) - other_task = await task_factory_v2(user_id=user.id, name="B") - other_wftask = await workflowtask_factory_v2( + other_workflow = await workflow_factory(project_id=project.id) + other_task = await task_factory(user_id=user.id, name="B") + other_wftask = await workflowtask_factory( workflow_id=other_workflow.id, task_id=other_task.id ) @@ -259,13 +259,13 @@ async def test_get_workflow_task_check_access( async def test_check_workflow_exists( MockCurrentUser, - project_factory_v2, - workflow_factory_v2, + project_factory, + workflow_factory, db, ): async with MockCurrentUser() as user: - project = await project_factory_v2(user) - workflow = await workflow_factory_v2(project_id=project.id) + project = await project_factory(user) + workflow = await workflow_factory(project_id=project.id) # Test success await _check_workflow_exists( @@ -295,14 +295,14 @@ async def test_check_workflow_exists( async def test_get_dataset_check_access( MockCurrentUser, - project_factory_v2, - dataset_factory_v2, + project_factory, + dataset_factory, db, ): async with MockCurrentUser() as user: - project = await project_factory_v2(user) - other_project = await project_factory_v2(user) - dataset = await dataset_factory_v2(project_id=project.id) + project = await project_factory(user) + other_project = await project_factory(user) + dataset = await dataset_factory(project_id=project.id) # Test success res = await _get_dataset_check_access( @@ -342,20 +342,20 @@ async def test_get_dataset_check_access( async def test_get_job_check_access( MockCurrentUser, - project_factory_v2, - workflow_factory_v2, - dataset_factory_v2, - job_factory_v2, - task_factory_v2, + project_factory, + workflow_factory, + dataset_factory, + job_factory, + task_factory, db, tmp_path, ): async with MockCurrentUser() as user: - project = await project_factory_v2(user, id=1) - other_project = await project_factory_v2(user, id=2) + project = await project_factory(user, id=1) + other_project = await project_factory(user, id=2) - workflow = await workflow_factory_v2(project_id=project.id) - t = await task_factory_v2(user_id=user.id) + workflow = await workflow_factory(project_id=project.id) + t = await task_factory(user_id=user.id) with pytest.raises(ValueError): await _workflow_insert_task( @@ -365,9 +365,9 @@ async def test_get_job_check_access( workflow_id=workflow.id, task_id=t.id, db=db ) - dataset = await dataset_factory_v2(project_id=project.id) + dataset = await dataset_factory(project_id=project.id) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, diff --git a/tests/v2/test_03_api/test_unit_aux_functions_tasks.py b/tests/v2/test_03_api/test_unit_aux_functions_tasks.py index 63f339b5cb..ae15cc5ba5 100644 --- a/tests/v2/test_03_api/test_unit_aux_functions_tasks.py +++ b/tests/v2/test_03_api/test_unit_aux_functions_tasks.py @@ -32,7 +32,7 @@ async def test_get_task( db, - task_factory_v2, + task_factory, local_resource_profile_db, slurm_sudo_resource_profile_db, ): @@ -90,13 +90,13 @@ async def test_get_task( db.add(LinkUserGroup(user_id=user_C.id, group_id=group_A.id)) await db.commit() - task_A_no_group = await task_factory_v2(user_id=user_A1.id, name="1") - task_A_group_A = await task_factory_v2( + task_A_no_group = await task_factory(user_id=user_A1.id, name="1") + task_A_group_A = await task_factory( user_id=user_A1.id, task_group_kwargs=dict(user_group_id=group_A.id), name="2", ) - task_A_group_A_resource2 = await 
task_factory_v2( + task_A_group_A_resource2 = await task_factory( user_id=user_C.id, task_group_kwargs=dict(user_group_id=group_A.id), name="3", @@ -165,7 +165,7 @@ async def test_get_task( async def test_get_task_require_active( - db, task_factory_v2, local_resource_profile_db, override_settings_factory + db, task_factory, local_resource_profile_db, override_settings_factory ): """ Test the `require_active` argument of `_get_task_read_access`. @@ -190,7 +190,7 @@ async def test_get_task_require_active( db.add(LinkUserGroup(user_id=user.id, group_id=group_0.id)) await db.commit() - task = await task_factory_v2(user_id=user.id) + task = await task_factory(user_id=user.id) task_group = await db.get(TaskGroupV2, task.taskgroupv2_id) # Make sure task group is active, and verify access is always OK diff --git a/tests/v2/test_03_api/test_unit_timezone.py b/tests/v2/test_03_api/test_unit_timezone.py index fe7e3576f6..289302daea 100644 --- a/tests/v2/test_03_api/test_unit_timezone.py +++ b/tests/v2/test_03_api/test_unit_timezone.py @@ -5,14 +5,14 @@ async def test_timezone_api( client, db, MockCurrentUser, - project_factory_v2, + project_factory, ): """ Test that API returns a timestamp in the same format which corresponds to the one in the database. """ async with MockCurrentUser() as user: - project = await project_factory_v2( + project = await project_factory( name="project name", user=user, ) diff --git a/tests/v2/test_04_runner/execute_tasks_v2.py b/tests/v2/test_04_runner/execute_tasks_v2.py index 604e715ade..73705d286c 100644 --- a/tests/v2/test_04_runner/execute_tasks_v2.py +++ b/tests/v2/test_04_runner/execute_tasks_v2.py @@ -4,10 +4,10 @@ from fractal_server.runner.executors.local.get_local_config import ( get_local_backend_config, ) -from fractal_server.runner.v2.runner import execute_tasks_v2 +from fractal_server.runner.v2.runner import execute_tasks -def execute_tasks_v2_mod( +def execute_tasks_mod( wf_task_list: list[WorkflowTaskV2], workflow_dir_local: Path, user_id: int, @@ -19,7 +19,7 @@ def execute_tasks_v2_mod( """ This is a version of `execute_tasks_v2` with some defaults pre-filled. 
""" - execute_tasks_v2( + execute_tasks( wf_task_list=wf_task_list, workflow_dir_local=workflow_dir_local, job_attribute_filters=(job_attribute_filters or {}), diff --git a/tests/v2/test_04_runner/test_dummy_examples.py b/tests/v2/test_04_runner/test_dummy_examples.py index 291ef20478..5801920b7f 100644 --- a/tests/v2/test_04_runner/test_dummy_examples.py +++ b/tests/v2/test_04_runner/test_dummy_examples.py @@ -18,7 +18,7 @@ from fractal_server.urls import normalize_url from .aux_get_dataset_attrs import _get_dataset_attrs -from .execute_tasks_v2 import execute_tasks_v2_mod +from .execute_tasks_v2 import execute_tasks_mod async def _find_last_history_unit(db: AsyncSession) -> HistoryUnit: @@ -100,11 +100,11 @@ def local_runner( async def test_dummy_insert_single_image( db, MockCurrentUser, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - workflowtask_factory_v2, - job_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + workflowtask_factory, + job_factory, tmp_path: Path, local_runner: LocalRunner, fractal_tasks_mock_db, @@ -119,16 +119,16 @@ async def test_dummy_insert_single_image( runner=local_runner, user_id=user.id, ) - project = await project_factory_v2(user) + project = await project_factory(user) - dataset = await dataset_factory_v2(project_id=project.id, zarr_dir=zarr_dir) - workflow = await workflow_factory_v2(project_id=project.id) - wftask = await workflowtask_factory_v2( + dataset = await dataset_factory(project_id=project.id, zarr_dir=zarr_dir) + workflow = await workflow_factory(project_id=project.id) + wftask = await workflowtask_factory( workflow_id=workflow.id, task_id=task_id, order=0, ) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, @@ -137,7 +137,7 @@ async def test_dummy_insert_single_image( ) # Case 0: Run successfully on an empty dataset - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask], dataset=dataset, workflow_dir_local=tmp_path / "job0", @@ -148,7 +148,7 @@ async def test_dummy_insert_single_image( # Case 1: Run successfully even if the image already exists db.expunge_all() dataset = await db.get(DatasetV2, dataset.id) - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask], dataset=dataset, workflow_dir_local=tmp_path / "job1", @@ -179,12 +179,12 @@ async def test_dummy_insert_single_image( }, ), ] - dataset_case_2 = await dataset_factory_v2( + dataset_case_2 = await dataset_factory( project_id=project.id, zarr_dir=zarr_dir, images=IMAGES, ) - wftask = await workflowtask_factory_v2( + wftask = await workflowtask_factory( workflow_id=workflow.id, task_id=task_id, args_non_parallel={ @@ -198,7 +198,7 @@ async def test_dummy_insert_single_image( ) }, ) - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask], dataset=dataset_case_2, workflow_dir_local=tmp_path / "job2", @@ -240,7 +240,7 @@ async def test_dummy_insert_single_image( for _args_non_parallel in [{"fail": True}, {"fail_2": True}]: debug(_args_non_parallel) - wftask = await workflowtask_factory_v2( + wftask = await workflowtask_factory( workflow_id=workflow.id, task_id=task_id, args_non_parallel=_args_non_parallel, @@ -248,7 +248,7 @@ async def test_dummy_insert_single_image( db.expunge_all() dataset = await db.get(DatasetV2, dataset.id) with pytest.raises(JobExecutionError) as e: - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask], dataset=dataset, workflow_dir_local=tmp_path / "job3", @@ -264,11 +264,11 @@ async def 
test_dummy_insert_single_image( async def test_dummy_remove_images( db, MockCurrentUser, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - workflowtask_factory_v2, - job_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + workflowtask_factory, + job_factory, tmp_path: Path, local_runner: LocalRunner, fractal_tasks_mock_db, @@ -282,10 +282,10 @@ async def test_dummy_remove_images( async with MockCurrentUser() as user: user_id = user.id - project = await project_factory_v2(user) + project = await project_factory(user) - workflow = await workflow_factory_v2(project_id=project.id) - wftask = await workflowtask_factory_v2( + workflow = await workflow_factory(project_id=project.id) + wftask = await workflowtask_factory( workflow_id=workflow.id, task_id=task_id, order=0, @@ -293,9 +293,9 @@ async def test_dummy_remove_images( # Run successfully on a dataset which includes the images to be # removed - project = await project_factory_v2(user) + project = await project_factory(user) N = 3 - dataset = await dataset_factory_v2( + dataset = await dataset_factory( project_id=project.id, zarr_dir=zarr_dir, images=[ @@ -308,7 +308,7 @@ async def test_dummy_remove_images( res = await db.execute(select(func.count(HistoryImageCache.zarr_url))) assert res.scalar() == 0 - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, @@ -329,7 +329,7 @@ async def test_dummy_remove_images( res = await db.execute(select(func.count(HistoryImageCache.zarr_url))) assert res.scalar() == N + 1 - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask], dataset=dataset, workflow_dir_local=tmp_path / "job0", @@ -344,12 +344,12 @@ async def test_dummy_remove_images( assert res.scalar() == 1 # Fail when removing images that do not exist - dataset_pre_fail = await dataset_factory_v2( + dataset_pre_fail = await dataset_factory( project_id=project.id, zarr_dir=zarr_dir, images=[dict(zarr_url=Path(zarr_dir, "another-image").as_posix())], ) - wftask = await workflowtask_factory_v2( + wftask = await workflowtask_factory( workflow_id=workflow.id, task_id=task_id, order=0, @@ -358,7 +358,7 @@ async def test_dummy_remove_images( ), ) with pytest.raises(JobExecutionError) as e: - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask], dataset=dataset_pre_fail, workflow_dir_local=tmp_path / "job1", @@ -373,11 +373,11 @@ async def test_dummy_remove_images( async def test_dummy_unset_attribute( db, MockCurrentUser, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - workflowtask_factory_v2, - job_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + workflowtask_factory, + job_factory, tmp_path: Path, local_runner: LocalRunner, fractal_tasks_mock_db, @@ -387,12 +387,12 @@ async def test_dummy_unset_attribute( async with MockCurrentUser() as user: user_id = user.id - project = await project_factory_v2(user) + project = await project_factory(user) - workflow = await workflow_factory_v2(project_id=project.id) + workflow = await workflow_factory(project_id=project.id) # Unset an existing attribute (starting from dataset_pre) - dataset1 = await dataset_factory_v2( + dataset1 = await dataset_factory( project_id=project.id, zarr_dir=zarr_dir, images=[ @@ -403,20 +403,20 @@ async def test_dummy_unset_attribute( ) ], ) - wftask = await workflowtask_factory_v2( + wftask = await workflowtask_factory( workflow_id=workflow.id, task_id=task_id, order=0, args_non_parallel=dict(attribute="key2"), 
) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset1.id, workflow_id=workflow.id, working_dir="/foo", status="done", ) - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask], dataset=dataset1, workflow_dir_local=tmp_path / "job0", @@ -430,7 +430,7 @@ async def test_dummy_unset_attribute( assert "key2" not in dataset_attrs["images"][0]["attributes"].keys() # Unset a missing attribute (starting from dataset_pre) - dataset2 = await dataset_factory_v2( + dataset2 = await dataset_factory( project_id=project.id, zarr_dir=zarr_dir, images=[ @@ -441,13 +441,13 @@ async def test_dummy_unset_attribute( ) ], ) - wftask = await workflowtask_factory_v2( + wftask = await workflowtask_factory( workflow_id=workflow.id, task_id=task_id, order=0, args_non_parallel=dict(attribute="missing-attribute"), ) - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask], dataset=dataset2, workflow_dir_local=tmp_path / "job1", @@ -466,11 +466,11 @@ async def test_dummy_unset_attribute( async def test_dummy_insert_single_image_with_attribute_none( db, MockCurrentUser, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - workflowtask_factory_v2, - job_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + workflowtask_factory, + job_factory, tmp_path: Path, local_runner: LocalRunner, fractal_tasks_mock_db, @@ -480,27 +480,27 @@ async def test_dummy_insert_single_image_with_attribute_none( task_id = fractal_tasks_mock_db["dummy_insert_single_image"].id async with MockCurrentUser() as user: user_id = user.id - project = await project_factory_v2(user) - workflow = await workflow_factory_v2(project_id=project.id) - wftask = await workflowtask_factory_v2( + project = await project_factory(user) + workflow = await workflow_factory(project_id=project.id) + wftask = await workflowtask_factory( workflow_id=workflow.id, task_id=task_id, order=0, args_non_parallel=dict(attributes={"attribute-name": None}), ) # Run successfully on an empty dataset - dataset = await dataset_factory_v2( + dataset = await dataset_factory( project_id=project.id, zarr_dir=zarr_dir, ) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, working_dir="/foo", status="done", ) - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask], dataset=dataset, workflow_dir_local=tmp_path / "job0", @@ -516,11 +516,11 @@ async def test_dummy_insert_single_image_with_attribute_none( async def test_dummy_insert_single_image_normalization( db, MockCurrentUser, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - workflowtask_factory_v2, - job_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + workflowtask_factory, + job_factory, tmp_path: Path, local_runner: LocalRunner, fractal_tasks_mock_db, @@ -530,27 +530,27 @@ async def test_dummy_insert_single_image_normalization( task_id = fractal_tasks_mock_db["dummy_insert_single_image"].id async with MockCurrentUser() as user: user_id = user.id - project = await project_factory_v2(user) - workflow = await workflow_factory_v2(project_id=project.id) - wftask = await workflowtask_factory_v2( + project = await project_factory(user) + workflow = await workflow_factory(project_id=project.id) + wftask = await workflowtask_factory( workflow_id=workflow.id, task_id=task_id, order=0, args_non_parallel={"trailing_slash": True}, ) # Run successfully on an empty dataset - dataset = await dataset_factory_v2( + dataset = 
await dataset_factory( project_id=project.id, zarr_dir=zarr_dir, ) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, working_dir="/foo", status="done", ) - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask], dataset=dataset, workflow_dir_local=tmp_path / "job0", @@ -568,11 +568,11 @@ async def test_dummy_insert_single_image_normalization( async def test_default_inclusion_of_images( db, MockCurrentUser, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - workflowtask_factory_v2, - job_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + workflowtask_factory, + job_factory, tmp_path: Path, local_runner: LocalRunner, fractal_tasks_mock_db, @@ -586,9 +586,9 @@ async def test_default_inclusion_of_images( task_id = fractal_tasks_mock_db["generic_task_parallel"].id async with MockCurrentUser() as user: user_id = user.id - project = await project_factory_v2(user) - workflow = await workflow_factory_v2(project_id=project.id) - wftask = await workflowtask_factory_v2( + project = await project_factory(user) + workflow = await workflow_factory(project_id=project.id) + wftask = await workflowtask_factory( workflow_id=workflow.id, task_id=task_id, order=0, @@ -603,19 +603,19 @@ async def test_default_inclusion_of_images( types={}, ) ] - dataset = await dataset_factory_v2( + dataset = await dataset_factory( project_id=project.id, zarr_dir=zarr_dir, images=images, ) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, working_dir="/foo", status="done", ) - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask], dataset=dataset, workflow_dir_local=tmp_path / "job0", @@ -633,11 +633,11 @@ async def test_default_inclusion_of_images( async def test_compound_task_with_compute_failure( db, MockCurrentUser, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - workflowtask_factory_v2, - job_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + workflowtask_factory, + job_factory, tmp_path: Path, local_runner: LocalRunner, fractal_tasks_mock_db, @@ -647,9 +647,9 @@ async def test_compound_task_with_compute_failure( task_id = fractal_tasks_mock_db["generic_task_compound"].id async with MockCurrentUser() as user: user_id = user.id - project = await project_factory_v2(user) - workflow = await workflow_factory_v2(project_id=project.id) - wftask = await workflowtask_factory_v2( + project = await project_factory(user) + workflow = await workflow_factory(project_id=project.id) + wftask = await workflowtask_factory( workflow_id=workflow.id, task_id=task_id, order=0, @@ -666,12 +666,12 @@ async def test_compound_task_with_compute_failure( ] # Run and fail - dataset = await dataset_factory_v2( + dataset = await dataset_factory( project_id=project.id, zarr_dir=zarr_dir, images=images, ) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, @@ -679,7 +679,7 @@ async def test_compound_task_with_compute_failure( status="done", ) with pytest.raises(JobExecutionError) as exc_info: - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask], dataset=dataset, workflow_dir_local=tmp_path / "job0", @@ -695,11 +695,11 @@ async def test_dummy_invalid_output_non_parallel( db, MockCurrentUser, monkeypatch, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - workflowtask_factory_v2, - job_factory_v2, 
+ project_factory, + dataset_factory, + workflow_factory, + workflowtask_factory, + job_factory, tmp_path: Path, local_runner: LocalRunner, fractal_tasks_mock_db, @@ -713,7 +713,7 @@ async def test_dummy_invalid_output_non_parallel( runner=local_runner, user_id=user.id, ) - project = await project_factory_v2(user) + project = await project_factory(user) IMAGES = [ dict( zarr_url=Path(zarr_dir, "my-image").as_posix(), @@ -721,17 +721,17 @@ async def test_dummy_invalid_output_non_parallel( attributes={}, ) ] - dataset = await dataset_factory_v2( + dataset = await dataset_factory( project_id=project.id, zarr_dir=zarr_dir, images=IMAGES ) - workflow = await workflow_factory_v2(project_id=project.id) - wftask = await workflowtask_factory_v2( + workflow = await workflow_factory(project_id=project.id) + wftask = await workflowtask_factory( workflow_id=workflow.id, task_id=task_id, order=0, ) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, @@ -751,7 +751,7 @@ def patched_cast(*args, **kwargs): patched_cast, ) with pytest.raises(JobExecutionError): - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask], dataset=dataset, workflow_dir_local=tmp_path / "job0", @@ -773,11 +773,11 @@ async def test_dummy_invalid_output_parallel( db, MockCurrentUser, monkeypatch, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - workflowtask_factory_v2, - job_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + workflowtask_factory, + job_factory, tmp_path: Path, local_runner: LocalRunner, fractal_tasks_mock_db, @@ -789,7 +789,7 @@ async def test_dummy_invalid_output_parallel( runner=local_runner, user_id=user.id, ) - project = await project_factory_v2(user) + project = await project_factory(user) IMAGES = [ dict( zarr_url=Path(zarr_dir, "my-image").as_posix(), @@ -798,16 +798,16 @@ async def test_dummy_invalid_output_parallel( ) ] - dataset = await dataset_factory_v2( + dataset = await dataset_factory( project_id=project.id, zarr_dir=zarr_dir, images=IMAGES ) - workflow = await workflow_factory_v2(project_id=project.id) - wftask = await workflowtask_factory_v2( + workflow = await workflow_factory(project_id=project.id) + wftask = await workflowtask_factory( workflow_id=workflow.id, task_id=task_id, order=0, ) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, @@ -831,7 +831,7 @@ def patched_task_output(*args, **kwargs): patched_task_output, ) with pytest.raises(JobExecutionError): - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask], dataset=dataset, workflow_dir_local=tmp_path / "job0", @@ -852,11 +852,11 @@ def patched_task_output(*args, **kwargs): async def test_status_based_submission( db, MockCurrentUser, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - workflowtask_factory_v2, - job_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + workflowtask_factory, + job_factory, tmp_path: Path, local_runner: LocalRunner, fractal_tasks_mock_db, @@ -879,35 +879,35 @@ async def test_status_based_submission( async with MockCurrentUser() as user: user_id = user.id - project = await project_factory_v2(user) + project = await project_factory(user) - workflow = await workflow_factory_v2(project_id=project.id) - wftask_failing = await workflowtask_factory_v2( + workflow = await workflow_factory(project_id=project.id) + wftask_failing = await workflowtask_factory( 
workflow_id=workflow.id, task_id=task_id, order=0, args_non_parallel=dict(raise_error=True), ) - wftask_ok = await workflowtask_factory_v2( + wftask_ok = await workflowtask_factory( workflow_id=workflow.id, task_id=task_id, order=0, ) # Case 1: Run and fail for B00 and B01 (by requiring the UNSET ones) - dataset = await dataset_factory_v2( + dataset = await dataset_factory( project_id=project.id, zarr_dir=zarr_dir, images=IMAGES, ) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, working_dir="/foo", ) with pytest.raises(JobExecutionError): - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask_failing], dataset=dataset, workflow_dir_local=tmp_path / str(job.id), @@ -938,14 +938,14 @@ async def test_status_based_submission( assert history_unit.status == HistoryUnitStatusWithUnset.FAILED # Case 1: Run and fail for no images (by requiring the DONE ones) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, working_dir="/foo", ) with pytest.raises(JobExecutionError, match="empty image list"): - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask_ok], dataset=dataset, workflow_dir_local=tmp_path / str(job.id), @@ -962,13 +962,13 @@ async def test_status_based_submission( assert last_history_run.status == HistoryUnitStatus.FAILED # Case 1: Run and succeed for no images (by requiring the UNSET ones) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, working_dir="/foo", ) - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask_ok], dataset=dataset, workflow_dir_local=tmp_path / str(job.id), @@ -988,18 +988,18 @@ async def test_status_based_submission( debug(res.scalars().all()) # Case 2: Run successfully on all images - dataset = await dataset_factory_v2( + dataset = await dataset_factory( project_id=project.id, zarr_dir=zarr_dir, images=IMAGES, ) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, working_dir="/foo", ) - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask_ok], dataset=dataset, workflow_dir_local=tmp_path / str(job.id), diff --git a/tests/v2/test_04_runner/test_fractal_examples.py b/tests/v2/test_04_runner/test_fractal_examples.py index 5d1a9f4095..8b6e032bdb 100644 --- a/tests/v2/test_04_runner/test_fractal_examples.py +++ b/tests/v2/test_04_runner/test_fractal_examples.py @@ -10,7 +10,7 @@ from fractal_server.runner.executors.local.runner import LocalRunner from .aux_get_dataset_attrs import _get_dataset_attrs -from .execute_tasks_v2 import execute_tasks_v2_mod +from .execute_tasks_v2 import execute_tasks_mod @pytest.fixture() @@ -51,11 +51,11 @@ def image_data_exist_on_disk(image_list: list[SingleImage]): async def test_fractal_demos_01( db, MockCurrentUser, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - workflowtask_factory_v2, - job_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + workflowtask_factory, + job_factory, tmp_path: Path, local_runner: LocalRunner, fractal_tasks_mock_db, @@ -67,45 +67,45 @@ async def test_fractal_demos_01( zarr_dir = (tmp_path / "zarr_dir").as_posix().rstrip("/") async with MockCurrentUser() as user: user_id = user.id - project = await project_factory_v2(user) + project = await project_factory(user) - dataset = await dataset_factory_v2(project_id=project.id, 
zarr_dir=zarr_dir) - workflow = await workflow_factory_v2(project_id=project.id) + dataset = await dataset_factory(project_id=project.id, zarr_dir=zarr_dir) + workflow = await workflow_factory(project_id=project.id) - wftask0 = await workflowtask_factory_v2( + wftask0 = await workflowtask_factory( workflow_id=workflow.id, task_id=fractal_tasks_mock_db["create_ome_zarr_compound"].id, order=0, args_non_parallel=dict(image_dir="/tmp/input_images"), args_parallel={}, ) - wftask1 = await workflowtask_factory_v2( + wftask1 = await workflowtask_factory( workflow_id=workflow.id, task_id=fractal_tasks_mock_db["illumination_correction"].id, args_parallel=dict(overwrite_input=True), order=1, ) - wftask2 = await workflowtask_factory_v2( + wftask2 = await workflowtask_factory( workflow_id=workflow.id, task_id=fractal_tasks_mock_db["MIP_compound"].id, args_non_parallel=dict(suffix="mip"), args_parallel={}, order=2, ) - wftask3 = await workflowtask_factory_v2( + wftask3 = await workflowtask_factory( workflow_id=workflow.id, task_id=fractal_tasks_mock_db["cellpose_segmentation"].id, args_parallel={}, order=3, ) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, working_dir="/foo", status="done", ) - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask0], dataset=dataset, workflow_dir_local=tmp_path / "job0", @@ -117,10 +117,10 @@ async def test_fractal_demos_01( _assert_image_data_exist(dataset_attrs["images"]) assert len(dataset_attrs["images"]) == 2 - dataset_with_attrs = await dataset_factory_v2( + dataset_with_attrs = await dataset_factory( project_id=project.id, zarr_dir=zarr_dir, **dataset_attrs ) - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask1], dataset=dataset_with_attrs, workflow_dir_local=tmp_path / "job1", @@ -152,10 +152,10 @@ async def test_fractal_demos_01( } _assert_image_data_exist(dataset_attrs["images"]) - dataset_with_attrs = await dataset_factory_v2( + dataset_with_attrs = await dataset_factory( project_id=project.id, zarr_dir=zarr_dir, **dataset_attrs ) - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask2], dataset=dataset_with_attrs, workflow_dir_local=tmp_path / "job2", @@ -186,10 +186,10 @@ async def test_fractal_demos_01( }, } _assert_image_data_exist(dataset_attrs["images"]) - dataset_with_attrs = await dataset_factory_v2( + dataset_with_attrs = await dataset_factory( project_id=project.id, zarr_dir=zarr_dir, **dataset_attrs ) - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask3], dataset=dataset_with_attrs, workflow_dir_local=tmp_path / "job3", @@ -208,11 +208,11 @@ async def test_fractal_demos_01( async def test_fractal_demos_01_no_overwrite( db, MockCurrentUser, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - workflowtask_factory_v2, - job_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + workflowtask_factory, + job_factory, tmp_path: Path, local_runner: LocalRunner, fractal_tasks_mock_db, @@ -225,38 +225,38 @@ async def test_fractal_demos_01_no_overwrite( zarr_dir = (tmp_path / "zarr_dir").as_posix().rstrip("/") async with MockCurrentUser() as user: user_id = user.id - project = await project_factory_v2(user) + project = await project_factory(user) - dataset = await dataset_factory_v2(project_id=project.id, zarr_dir=zarr_dir) - workflow = await workflow_factory_v2(project_id=project.id) + dataset = await dataset_factory(project_id=project.id, zarr_dir=zarr_dir) + workflow = await 
workflow_factory(project_id=project.id) - wftask0 = await workflowtask_factory_v2( + wftask0 = await workflowtask_factory( workflow_id=workflow.id, task_id=fractal_tasks_mock_db["create_ome_zarr_compound"].id, order=0, args_non_parallel=dict(image_dir="/tmp/input_images"), args_parallel={}, ) - wftask1 = await workflowtask_factory_v2( + wftask1 = await workflowtask_factory( workflow_id=workflow.id, task_id=fractal_tasks_mock_db["illumination_correction"].id, args_parallel=dict(overwrite_input=False), order=1, ) - wftask2 = await workflowtask_factory_v2( + wftask2 = await workflowtask_factory( workflow_id=workflow.id, task_id=fractal_tasks_mock_db["MIP_compound"].id, args_non_parallel=dict(suffix="mip"), args_parallel={}, order=2, ) - wftask3 = await workflowtask_factory_v2( + wftask3 = await workflowtask_factory( workflow_id=workflow.id, task_id=fractal_tasks_mock_db["cellpose_segmentation"].id, args_parallel={}, order=3, ) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, @@ -264,7 +264,7 @@ async def test_fractal_demos_01_no_overwrite( status="done", ) - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask0], dataset=dataset, workflow_dir_local=tmp_path / "job0", @@ -281,10 +281,10 @@ async def test_fractal_demos_01_no_overwrite( _assert_image_data_exist(dataset_attrs["images"]) # Run illumination correction with overwrite_input=False - dataset_with_attrs = await dataset_factory_v2( + dataset_with_attrs = await dataset_factory( project_id=project.id, zarr_dir=zarr_dir, **dataset_attrs ) - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask1], dataset=dataset_with_attrs, workflow_dir_local=tmp_path / "job1", @@ -347,10 +347,10 @@ async def test_fractal_demos_01_no_overwrite( }, } _assert_image_data_exist(dataset_attrs["images"]) - dataset_with_attrs = await dataset_factory_v2( + dataset_with_attrs = await dataset_factory( project_id=project.id, zarr_dir=zarr_dir, **dataset_attrs ) - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask2], dataset=dataset_with_attrs, workflow_dir_local=tmp_path / "job2", @@ -397,10 +397,10 @@ async def test_fractal_demos_01_no_overwrite( }, } _assert_image_data_exist(dataset_attrs["images"]) - dataset_with_attrs = await dataset_factory_v2( + dataset_with_attrs = await dataset_factory( project_id=project.id, zarr_dir=zarr_dir, **dataset_attrs ) - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask3], dataset=dataset_with_attrs, workflow_dir_local=tmp_path / "job3", @@ -418,11 +418,11 @@ async def test_fractal_demos_01_no_overwrite( async def test_registration_no_overwrite( db, MockCurrentUser, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - workflowtask_factory_v2, - job_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + workflowtask_factory, + job_factory, tmp_path: Path, local_runner: LocalRunner, fractal_tasks_mock_db, @@ -430,36 +430,36 @@ async def test_registration_no_overwrite( zarr_dir = (tmp_path / "zarr_dir").as_posix().rstrip("/") async with MockCurrentUser() as user: user_id = user.id - project = await project_factory_v2(user) + project = await project_factory(user) - dataset = await dataset_factory_v2(project_id=project.id, zarr_dir=zarr_dir) - workflow = await workflow_factory_v2(project_id=project.id) + dataset = await dataset_factory(project_id=project.id, zarr_dir=zarr_dir) + workflow = await workflow_factory(project_id=project.id) - wftask0 = await workflowtask_factory_v2( + 
wftask0 = await workflowtask_factory( workflow_id=workflow.id, task_id=fractal_tasks_mock_db["create_ome_zarr_multiplex_compound"].id, order=0, args_non_parallel=dict(image_dir="/tmp/input_images"), args_parallel={}, ) - wftask1 = await workflowtask_factory_v2( + wftask1 = await workflowtask_factory( workflow_id=workflow.id, task_id=fractal_tasks_mock_db["calculate_registration_compound"].id, args_non_parallel={"ref_acquisition": 0}, order=1, ) - wftask2 = await workflowtask_factory_v2( + wftask2 = await workflowtask_factory( workflow_id=workflow.id, task_id=fractal_tasks_mock_db["find_registration_consensus"].id, order=2, ) - wftask3 = await workflowtask_factory_v2( + wftask3 = await workflowtask_factory( workflow_id=workflow.id, task_id=fractal_tasks_mock_db["apply_registration_to_image"].id, args_parallel={"overwrite_input": False}, order=3, ) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, @@ -467,7 +467,7 @@ async def test_registration_no_overwrite( status="done", ) - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask0], dataset=dataset, workflow_dir_local=tmp_path / "job0", @@ -478,10 +478,10 @@ async def test_registration_no_overwrite( dataset_attrs = await _get_dataset_attrs(db, dataset.id) # Run init registration - dataset_with_attrs = await dataset_factory_v2( + dataset_with_attrs = await dataset_factory( project_id=project.id, zarr_dir=zarr_dir, **dataset_attrs ) - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask1], dataset=dataset_with_attrs, workflow_dir_local=tmp_path / "job1", @@ -499,10 +499,10 @@ async def test_registration_no_overwrite( assert os.path.isfile(f"{image['zarr_url']}/registration_table") # Run find_registration_consensus - dataset_with_attrs = await dataset_factory_v2( + dataset_with_attrs = await dataset_factory( project_id=project.id, zarr_dir=zarr_dir, **dataset_attrs ) - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask2], dataset=dataset_with_attrs, workflow_dir_local=tmp_path / "job2", @@ -520,10 +520,10 @@ async def test_registration_no_overwrite( assert len(dataset_attrs["images"]) == 6 # Run apply_registration_to_image - dataset_with_attrs = await dataset_factory_v2( + dataset_with_attrs = await dataset_factory( project_id=project.id, zarr_dir=zarr_dir, **dataset_attrs ) - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask3], dataset=dataset_with_attrs, workflow_dir_local=tmp_path / "job3", @@ -540,11 +540,11 @@ async def test_registration_no_overwrite( async def test_registration_overwrite( db, MockCurrentUser, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - workflowtask_factory_v2, - job_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + workflowtask_factory, + job_factory, tmp_path: Path, local_runner: LocalRunner, fractal_tasks_mock_db, @@ -552,36 +552,36 @@ async def test_registration_overwrite( zarr_dir = (tmp_path / "zarr_dir").as_posix().rstrip("/") async with MockCurrentUser() as user: user_id = user.id - project = await project_factory_v2(user) + project = await project_factory(user) - dataset = await dataset_factory_v2(project_id=project.id, zarr_dir=zarr_dir) - workflow = await workflow_factory_v2(project_id=project.id) + dataset = await dataset_factory(project_id=project.id, zarr_dir=zarr_dir) + workflow = await workflow_factory(project_id=project.id) - wftask0 = await workflowtask_factory_v2( + wftask0 = await workflowtask_factory( workflow_id=workflow.id, 
task_id=fractal_tasks_mock_db["create_ome_zarr_multiplex_compound"].id, order=0, args_non_parallel=dict(image_dir="/tmp/input_images"), args_parallel={}, ) - wftask1 = await workflowtask_factory_v2( + wftask1 = await workflowtask_factory( workflow_id=workflow.id, task_id=fractal_tasks_mock_db["calculate_registration_compound"].id, args_non_parallel={"ref_acquisition": 0}, order=1, ) - wftask2 = await workflowtask_factory_v2( + wftask2 = await workflowtask_factory( workflow_id=workflow.id, task_id=fractal_tasks_mock_db["find_registration_consensus"].id, order=2, ) - wftask3 = await workflowtask_factory_v2( + wftask3 = await workflowtask_factory( workflow_id=workflow.id, task_id=fractal_tasks_mock_db["apply_registration_to_image"].id, args_parallel={"overwrite_input": True}, order=3, ) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, @@ -589,7 +589,7 @@ async def test_registration_overwrite( status="done", ) - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask0], dataset=dataset, workflow_dir_local=tmp_path / "job0", @@ -600,10 +600,10 @@ async def test_registration_overwrite( dataset_attrs = await _get_dataset_attrs(db, dataset.id) # Run init registration - dataset_with_attrs = await dataset_factory_v2( + dataset_with_attrs = await dataset_factory( project_id=project.id, zarr_dir=zarr_dir, **dataset_attrs ) - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask1], dataset=dataset_with_attrs, workflow_dir_local=tmp_path / "job1", @@ -621,10 +621,10 @@ async def test_registration_overwrite( assert os.path.isfile(f"{image['zarr_url']}/registration_table") # Run find_registration_consensus - dataset_with_attrs = await dataset_factory_v2( + dataset_with_attrs = await dataset_factory( project_id=project.id, zarr_dir=zarr_dir, **dataset_attrs ) - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask2], dataset=dataset_with_attrs, workflow_dir_local=tmp_path / "job2", @@ -642,10 +642,10 @@ async def test_registration_overwrite( assert len(dataset_attrs["images"]) == 6 # Run apply_registration_to_image - dataset_with_attrs = await dataset_factory_v2( + dataset_with_attrs = await dataset_factory( project_id=project.id, zarr_dir=zarr_dir, **dataset_attrs ) - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask3], dataset=dataset_with_attrs, workflow_dir_local=tmp_path / "job3", @@ -664,11 +664,11 @@ async def test_registration_overwrite( async def test_channel_parallelization_with_overwrite( db, MockCurrentUser, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - workflowtask_factory_v2, - job_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + workflowtask_factory, + job_factory, tmp_path: Path, local_runner: LocalRunner, fractal_tasks_mock_db, @@ -676,25 +676,25 @@ async def test_channel_parallelization_with_overwrite( zarr_dir = (tmp_path / "zarr_dir").as_posix().rstrip("/") async with MockCurrentUser() as user: user_id = user.id - project = await project_factory_v2(user) + project = await project_factory(user) - dataset = await dataset_factory_v2(project_id=project.id, zarr_dir=zarr_dir) - workflow = await workflow_factory_v2(project_id=project.id) + dataset = await dataset_factory(project_id=project.id, zarr_dir=zarr_dir) + workflow = await workflow_factory(project_id=project.id) - wftask0 = await workflowtask_factory_v2( + wftask0 = await workflowtask_factory( workflow_id=workflow.id, 
task_id=fractal_tasks_mock_db["create_ome_zarr_compound"].id, order=0, args_non_parallel=dict(image_dir="/tmp/input_images"), ) - wftask1 = await workflowtask_factory_v2( + wftask1 = await workflowtask_factory( workflow_id=workflow.id, task_id=fractal_tasks_mock_db["illumination_correction_compound"].id, order=1, args_non_parallel=dict(overwrite_input=True), args_parallel=dict(another_argument="something"), ) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, @@ -703,7 +703,7 @@ async def test_channel_parallelization_with_overwrite( ) # Run create_ome_zarr+yokogawa_to_zarr - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask0], dataset=dataset, workflow_dir_local=tmp_path / "job0", @@ -716,7 +716,7 @@ async def test_channel_parallelization_with_overwrite( # Run illumination_correction_compound db.expunge_all() dataset = await db.get(DatasetV2, dataset.id) - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask1], dataset=dataset, workflow_dir_local=tmp_path / "job1", @@ -733,11 +733,11 @@ async def test_channel_parallelization_with_overwrite( async def test_channel_parallelization_no_overwrite( db, MockCurrentUser, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - workflowtask_factory_v2, - job_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + workflowtask_factory, + job_factory, tmp_path: Path, local_runner: LocalRunner, fractal_tasks_mock_db, @@ -745,25 +745,25 @@ async def test_channel_parallelization_no_overwrite( zarr_dir = (tmp_path / "zarr_dir").as_posix().rstrip("/") async with MockCurrentUser() as user: user_id = user.id - project = await project_factory_v2(user) + project = await project_factory(user) - dataset = await dataset_factory_v2(project_id=project.id, zarr_dir=zarr_dir) - workflow = await workflow_factory_v2(project_id=project.id) + dataset = await dataset_factory(project_id=project.id, zarr_dir=zarr_dir) + workflow = await workflow_factory(project_id=project.id) - wftask0 = await workflowtask_factory_v2( + wftask0 = await workflowtask_factory( workflow_id=workflow.id, task_id=fractal_tasks_mock_db["create_ome_zarr_compound"].id, order=0, args_non_parallel=dict(image_dir="/tmp/input_images"), ) - wftask1 = await workflowtask_factory_v2( + wftask1 = await workflowtask_factory( workflow_id=workflow.id, task_id=fractal_tasks_mock_db["illumination_correction_compound"].id, order=1, args_non_parallel=dict(overwrite_input=False), args_parallel=dict(another_argument="something"), ) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, @@ -772,7 +772,7 @@ async def test_channel_parallelization_no_overwrite( ) # Run create_ome_zarr+yokogawa_to_zarr - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask0], dataset=dataset, workflow_dir_local=tmp_path / "job0", @@ -783,10 +783,10 @@ async def test_channel_parallelization_no_overwrite( dataset_attrs = await _get_dataset_attrs(db, dataset.id) # Run illumination_correction_compound - dataset_with_attrs = await dataset_factory_v2( + dataset_with_attrs = await dataset_factory( project_id=project.id, zarr_dir=zarr_dir, **dataset_attrs ) - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask1], dataset=dataset_with_attrs, workflow_dir_local=tmp_path / "job1", diff --git a/tests/v2/test_04_runner/test_no_images_parallelization.py b/tests/v2/test_04_runner/test_no_images_parallelization.py index 
543d16d6f0..6bebe65921 100644 --- a/tests/v2/test_04_runner/test_no_images_parallelization.py +++ b/tests/v2/test_04_runner/test_no_images_parallelization.py @@ -6,7 +6,7 @@ from fractal_server.runner.exceptions import JobExecutionError from fractal_server.runner.executors.local.runner import LocalRunner -from .execute_tasks_v2 import execute_tasks_v2_mod +from .execute_tasks_v2 import execute_tasks_mod @pytest.fixture() @@ -27,12 +27,12 @@ def local_runner( async def test_parallelize_on_no_images( db, MockCurrentUser, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - task_factory_v2, - workflowtask_factory_v2, - job_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + task_factory, + workflowtask_factory, + job_factory, tmp_path: Path, local_runner: Executor, ): @@ -41,22 +41,22 @@ async def test_parallelize_on_no_images( """ # Preliminary setup async with MockCurrentUser() as user: - project = await project_factory_v2(user) - dataset = await dataset_factory_v2(project_id=project.id) - workflow = await workflow_factory_v2(project_id=project.id) + project = await project_factory(user) + dataset = await dataset_factory(project_id=project.id) + workflow = await workflow_factory(project_id=project.id) - task = await task_factory_v2( + task = await task_factory( name="name-1", type="parallel", command_parallel="echo", user_id=user.id, ) - wftask = await workflowtask_factory_v2( + wftask = await workflowtask_factory( workflow_id=workflow.id, task_id=task.id, order=0, ) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, @@ -64,7 +64,7 @@ async def test_parallelize_on_no_images( status="done", ) with pytest.raises(JobExecutionError, match="empty image list"): - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask], dataset=dataset, workflow_dir_local=tmp_path / "job0", @@ -73,20 +73,20 @@ async def test_parallelize_on_no_images( job_id=job.id, ) - task = await task_factory_v2( + task = await task_factory( name="name-2", type="compound", command_non_parallel="echo", command_parallel="echo", user_id=user.id, ) - wftask = await workflowtask_factory_v2( + wftask = await workflowtask_factory( workflow_id=workflow.id, task_id=task.id, order=0, ) with pytest.raises(JobExecutionError, match="empty image list"): - execute_tasks_v2_mod( + execute_tasks_mod( wf_task_list=[wftask], dataset=dataset, workflow_dir_local=tmp_path / "job1", diff --git a/tests/v2/test_04_runner/test_unit_db_tools.py b/tests/v2/test_04_runner/test_unit_db_tools.py index 74c3828407..13ac66d0c5 100644 --- a/tests/v2/test_04_runner/test_unit_db_tools.py +++ b/tests/v2/test_04_runner/test_unit_db_tools.py @@ -17,24 +17,24 @@ async def test_update_status_of_history_unit( # Fixtures db_sync, db, - dataset_factory_v2, - project_factory_v2, - task_factory_v2, - workflow_factory_v2, - workflowtask_factory_v2, - job_factory_v2, + dataset_factory, + project_factory, + task_factory, + workflow_factory, + workflowtask_factory, + job_factory, MockCurrentUser, ): async with MockCurrentUser() as user: - task = await task_factory_v2(user.id) - project = await project_factory_v2(user) - dataset = await dataset_factory_v2(project_id=project.id) - workflow = await workflow_factory_v2(project_id=project.id) - wftask = await workflowtask_factory_v2( + task = await task_factory(user.id) + project = await project_factory(user) + dataset = await dataset_factory(project_id=project.id) + workflow = await 
workflow_factory(project_id=project.id) + wftask = await workflowtask_factory( workflow_id=workflow.id, task_id=task.id, ) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, diff --git a/tests/v2/test_04_runner/test_unit_submit_workflow.py b/tests/v2/test_04_runner/test_unit_submit_workflow.py index d04cfa6b1b..d388796cb2 100644 --- a/tests/v2/test_04_runner/test_unit_submit_workflow.py +++ b/tests/v2/test_04_runner/test_unit_submit_workflow.py @@ -12,24 +12,24 @@ async def test_fail_submit_workflows_wrong_IDs( MockCurrentUser, - project_factory_v2, - workflow_factory_v2, - dataset_factory_v2, - task_factory_v2, - job_factory_v2, + project_factory, + workflow_factory, + dataset_factory, + task_factory, + job_factory, tmp_path, db, local_resource_profile_objects, ): res, prof = local_resource_profile_objects[:] async with MockCurrentUser() as user: - project = await project_factory_v2(user) - workflow = await workflow_factory_v2(project_id=project.id) - task = await task_factory_v2(user_id=user.id) + project = await project_factory(user) + workflow = await workflow_factory(project_id=project.id) + task = await task_factory(user_id=user.id) await _workflow_insert_task( workflow_id=workflow.id, task_id=task.id, db=db ) - dataset = await dataset_factory_v2(project_id=project.id) + dataset = await dataset_factory(project_id=project.id) submit_workflow( workflow_id=workflow.id, @@ -41,7 +41,7 @@ async def test_fail_submit_workflows_wrong_IDs( user_cache_dir=tmp_path / "cache", ) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, @@ -62,11 +62,11 @@ async def test_fail_submit_workflows_wrong_IDs( async def test_mkdir_error( - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - task_factory_v2, - job_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + task_factory, + job_factory, db, tmp_path, MockCurrentUser, @@ -80,14 +80,14 @@ async def test_mkdir_error( prof.username = None async with MockCurrentUser(user_kwargs={"is_verified": True}) as user: - project = await project_factory_v2(user) - dataset = await dataset_factory_v2(project_id=project.id, name="ds") - workflow = await workflow_factory_v2(project_id=project.id, name="wf") # noqa - task = await task_factory_v2(user_id=user.id) + project = await project_factory(user) + dataset = await dataset_factory(project_id=project.id, name="ds") + workflow = await workflow_factory(project_id=project.id, name="wf") # noqa + task = await task_factory(user_id=user.id) await _workflow_insert_task( workflow_id=workflow.id, task_id=task.id, db=db ) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, @@ -115,11 +115,11 @@ async def test_mkdir_error( async def test_submit_workflow_failure( tmp_path, - project_factory_v2, - workflow_factory_v2, - task_factory_v2, - dataset_factory_v2, - job_factory_v2, + project_factory, + workflow_factory, + task_factory, + dataset_factory, + job_factory, MockCurrentUser, db, local_resource_profile_objects, @@ -136,14 +136,14 @@ async def test_submit_workflow_failure( assert working_dir.exists() async with MockCurrentUser() as user: - task = await task_factory_v2(user_id=user.id) - project = await project_factory_v2(user=user) - workflow = await workflow_factory_v2(project_id=project.id) + task = await task_factory(user_id=user.id) + project = await 
project_factory(user=user) + workflow = await workflow_factory(project_id=project.id) await _workflow_insert_task( workflow_id=workflow.id, task_id=task.id, db=db ) - dataset = await dataset_factory_v2(project_id=project.id) - job = await job_factory_v2( + dataset = await dataset_factory(project_id=project.id) + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, diff --git a/tests/v2/test_06_tasks_lifecycle/test_api_task_lifecycle.py b/tests/v2/test_06_tasks_lifecycle/test_api_task_lifecycle.py index af0f65678f..a3eaa9a75a 100644 --- a/tests/v2/test_06_tasks_lifecycle/test_api_task_lifecycle.py +++ b/tests/v2/test_06_tasks_lifecycle/test_api_task_lifecycle.py @@ -38,7 +38,7 @@ async def test_deactivate_task_group_api( client, MockCurrentUser, db, - task_factory_v2, + task_factory, FRACTAL_RUNNER_BACKEND, slurm_ssh_resource_profile_fake_db, local_resource_profile_db, @@ -48,7 +48,7 @@ async def test_deactivate_task_group_api( """ async with MockCurrentUser() as different_user: - non_accessible_task = await task_factory_v2( + non_accessible_task = await task_factory( user_id=different_user.id, name="task" ) @@ -60,18 +60,18 @@ async def test_deactivate_task_group_api( async with MockCurrentUser(user_kwargs=dict(profile_id=profile.id)) as user: # Create mock task groups - non_active_task = await task_factory_v2( + non_active_task = await task_factory( user_id=user.id, name="task1", task_group_kwargs=dict(active=False), ) - task_other = await task_factory_v2( + task_other = await task_factory( user_id=user.id, version=None, name="task2", task_group_kwargs=dict(origin="other"), ) - task_pypi = await task_factory_v2( + task_pypi = await task_factory( user_id=user.id, name="task3", version="1.2.3", @@ -139,7 +139,7 @@ async def test_reactivate_task_group_api( client, MockCurrentUser, db, - task_factory_v2, + task_factory, current_py_version, FRACTAL_RUNNER_BACKEND, slurm_ssh_resource_profile_fake_db, @@ -150,7 +150,7 @@ async def test_reactivate_task_group_api( """ async with MockCurrentUser() as different_user: - non_accessible_task = await task_factory_v2( + non_accessible_task = await task_factory( user_id=different_user.id, name="task1" ) @@ -161,14 +161,14 @@ async def test_reactivate_task_group_api( resource, profile = local_resource_profile_db async with MockCurrentUser(user_kwargs=dict(profile_id=profile.id)) as user: # Create mock task groups - active_task = await task_factory_v2(user_id=user.id, name="task2") - task_other = await task_factory_v2( + active_task = await task_factory(user_id=user.id, name="task2") + task_other = await task_factory( user_id=user.id, version=None, name="task3", task_group_kwargs=dict(active=False), ) - task_pypi = await task_factory_v2( + task_pypi = await task_factory( user_id=user.id, name="task4", version="1.2.3", @@ -483,7 +483,7 @@ async def test_lifecycle_slurm_ssh( async def test_fail_due_to_ongoing_activities( - client, MockCurrentUser, db, task_factory_v2, local_resource_profile_db + client, MockCurrentUser, db, task_factory, local_resource_profile_db ): """ Test that deactivate/reactivate endpoints fail if other @@ -492,7 +492,7 @@ async def test_fail_due_to_ongoing_activities( resource, profile = local_resource_profile_db async with MockCurrentUser(user_kwargs=dict(profile_id=profile.id)) as user: # Create mock objects - task = await task_factory_v2(user_id=user.id, name="task") + task = await task_factory(user_id=user.id, name="task") task_group = await db.get(TaskGroupV2, task.taskgroupv2_id) 
db.add(task_group) await db.commit() @@ -533,28 +533,28 @@ async def test_lifecycle_actions_with_submitted_jobs( db, client, MockCurrentUser, - task_factory_v2, - project_factory_v2, - workflow_factory_v2, - dataset_factory_v2, + task_factory, + project_factory, + workflow_factory, + dataset_factory, local_resource_profile_db, ): resource, profile = local_resource_profile_db async with MockCurrentUser(user_kwargs=dict(profile_id=profile.id)) as user: # Create mock task groups - active_task = await task_factory_v2( + active_task = await task_factory( user_id=user.id, name="task-active", task_group_kwargs=dict(active=True), ) - non_active_task = await task_factory_v2( + non_active_task = await task_factory( user_id=user.id, name="task-non-active", task_group_kwargs=dict(active=False), ) - p = await project_factory_v2(user=user) - wf = await workflow_factory_v2() - ds = await dataset_factory_v2() + p = await project_factory(user=user) + wf = await workflow_factory() + ds = await dataset_factory() for task in [active_task, non_active_task]: await _workflow_insert_task( workflow_id=wf.id, diff --git a/tests/v2/test_06_tasks_lifecycle/test_unit_aux_functions_task_lifecycle.py b/tests/v2/test_06_tasks_lifecycle/test_unit_aux_functions_task_lifecycle.py index 929e899467..1da3b3a773 100644 --- a/tests/v2/test_06_tasks_lifecycle/test_unit_aux_functions_task_lifecycle.py +++ b/tests/v2/test_06_tasks_lifecycle/test_unit_aux_functions_task_lifecycle.py @@ -13,9 +13,9 @@ async def test_check_no_related_workflowtask( db, client, MockCurrentUser, - project_factory_v2, - workflow_factory_v2, - workflowtask_factory_v2, + project_factory, + workflow_factory, + workflowtask_factory, local_resource_profile_db, ): resource, profile = local_resource_profile_db @@ -36,10 +36,10 @@ async def test_check_no_related_workflowtask( await check_no_related_workflowtask(task_group=task_group, db=db) - project = await project_factory_v2(user) - workflow = await workflow_factory_v2(project_id=project.id) + project = await project_factory(user) + workflow = await workflow_factory(project_id=project.id) - await workflowtask_factory_v2( + await workflowtask_factory( workflow_id=workflow.id, task_id=task_group.task_list[-1].id ) diff --git a/tests/v2/test_07_full_workflow/test_full_workflow_local.py b/tests/v2/test_07_full_workflow/test_full_workflow_local.py index 3b6d57bfb9..e7206a0ce5 100644 --- a/tests/v2/test_07_full_workflow/test_full_workflow_local.py +++ b/tests/v2/test_07_full_workflow/test_full_workflow_local.py @@ -22,9 +22,9 @@ async def test_full_workflow_local( client, MockCurrentUser, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, + project_factory, + dataset_factory, + workflow_factory, override_settings_factory, fractal_tasks_mock_db, local_resource_profile_db, @@ -33,9 +33,9 @@ async def test_full_workflow_local( resource, profile = local_resource_profile_db await full_workflow( MockCurrentUser=MockCurrentUser, - project_factory_v2=project_factory_v2, - dataset_factory_v2=dataset_factory_v2, - workflow_factory_v2=workflow_factory_v2, + project_factory_v2=project_factory, + dataset_factory_v2=dataset_factory, + workflow_factory_v2=workflow_factory, client=client, tasks=fractal_tasks_mock_db, user_kwargs=dict( @@ -48,9 +48,9 @@ async def test_full_workflow_local( async def test_full_workflow_TaskExecutionError( client, MockCurrentUser, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, + project_factory, + dataset_factory, + workflow_factory, override_settings_factory, 
fractal_tasks_mock_db, local_resource_profile_db, @@ -64,9 +64,9 @@ async def test_full_workflow_TaskExecutionError( resource, profile = local_resource_profile_db await full_workflow_TaskExecutionError( MockCurrentUser=MockCurrentUser, - project_factory_v2=project_factory_v2, - dataset_factory_v2=dataset_factory_v2, - workflow_factory_v2=workflow_factory_v2, + project_factory_v2=project_factory, + dataset_factory_v2=dataset_factory, + workflow_factory_v2=workflow_factory, client=client, tasks=fractal_tasks_mock_db, user_kwargs=dict(profile_id=profile.id), @@ -78,10 +78,10 @@ async def test_non_executable_task_command_local( client, MockCurrentUser, testdata_path, - task_factory_v2, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, + task_factory, + project_factory, + dataset_factory, + workflow_factory, override_settings_factory, local_resource_profile_db, ): @@ -95,10 +95,10 @@ async def test_non_executable_task_command_local( MockCurrentUser=MockCurrentUser, client=client, testdata_path=testdata_path, - project_factory_v2=project_factory_v2, - workflow_factory_v2=workflow_factory_v2, - dataset_factory_v2=dataset_factory_v2, - task_factory_v2=task_factory_v2, + project_factory_v2=project_factory, + workflow_factory_v2=workflow_factory, + dataset_factory_v2=dataset_factory, + task_factory_v2=task_factory, user_kwargs=dict( profile_id=profile.id, ), @@ -109,10 +109,10 @@ async def test_non_executable_task_command_local( async def test_failing_workflow_UnknownError_local( client, MockCurrentUser, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - task_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + task_factory, monkeypatch, override_settings_factory, local_resource_profile_db, @@ -128,10 +128,10 @@ async def test_failing_workflow_UnknownError_local( MockCurrentUser=MockCurrentUser, client=client, monkeypatch=monkeypatch, - project_factory_v2=project_factory_v2, - dataset_factory_v2=dataset_factory_v2, - workflow_factory_v2=workflow_factory_v2, - task_factory_v2=task_factory_v2, + project_factory_v2=project_factory, + dataset_factory_v2=dataset_factory, + workflow_factory_v2=workflow_factory, + task_factory_v2=task_factory, user_kwargs=dict(profile_id=profile.id), resource_id=resource.id, ) @@ -144,10 +144,10 @@ async def test_failing_workflow_UnknownError_local( async def test_non_python_task_local( client, MockCurrentUser, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - task_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + task_factory, testdata_path, tmp777_path, local_resource_profile_db, @@ -161,10 +161,10 @@ async def test_non_python_task_local( await workflow_with_non_python_task( client=client, MockCurrentUser=MockCurrentUser, - project_factory_v2=project_factory_v2, - dataset_factory_v2=dataset_factory_v2, - workflow_factory_v2=workflow_factory_v2, - task_factory_v2=task_factory_v2, + project_factory_v2=project_factory, + dataset_factory_v2=dataset_factory, + workflow_factory_v2=workflow_factory, + task_factory_v2=task_factory, testdata_path=testdata_path, tmp777_path=tmp777_path, additional_user_kwargs=dict(profile_id=profile.id), @@ -175,9 +175,9 @@ async def test_non_python_task_local( async def test_failing_workflow_post_task_execution( client, MockCurrentUser, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, + project_factory, + dataset_factory, + workflow_factory, override_settings_factory, tmp_path, fractal_tasks_mock_db, @@ -188,9 +188,9 @@ async def 
test_failing_workflow_post_task_execution( await failing_workflow_post_task_execution( MockCurrentUser=MockCurrentUser, - project_factory_v2=project_factory_v2, - dataset_factory_v2=dataset_factory_v2, - workflow_factory_v2=workflow_factory_v2, + project_factory_v2=project_factory, + dataset_factory_v2=dataset_factory, + workflow_factory_v2=workflow_factory, client=client, tasks=fractal_tasks_mock_db, tmp_path=tmp_path, diff --git a/tests/v2/test_07_full_workflow/test_full_workflow_slurm_ssh.py b/tests/v2/test_07_full_workflow/test_full_workflow_slurm_ssh.py index 5520ac7c49..117ea0c834 100644 --- a/tests/v2/test_07_full_workflow/test_full_workflow_slurm_ssh.py +++ b/tests/v2/test_07_full_workflow/test_full_workflow_slurm_ssh.py @@ -15,10 +15,10 @@ async def test_workflow_with_non_python_task_slurm_ssh( MockCurrentUser, testdata_path, tmp777_path, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - task_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + task_factory, slurm_ssh_resource_profile_db, override_settings_factory, ): @@ -36,10 +36,10 @@ async def test_workflow_with_non_python_task_slurm_ssh( resource_id=resource.id, client=client, testdata_path=testdata_path, - project_factory_v2=project_factory_v2, - dataset_factory_v2=dataset_factory_v2, - workflow_factory_v2=workflow_factory_v2, - task_factory_v2=task_factory_v2, + project_factory_v2=project_factory, + dataset_factory_v2=dataset_factory, + workflow_factory_v2=workflow_factory, + task_factory_v2=task_factory, tmp777_path=tmp777_path, ) @@ -54,10 +54,10 @@ async def test_workflow_with_non_python_task_slurm_ssh_fail( MockCurrentUser, testdata_path, tmp777_path, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - task_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + task_factory, slurm_ssh_resource_profile_db, override_settings_factory, db, @@ -86,10 +86,10 @@ async def test_workflow_with_non_python_task_slurm_ssh_fail( ), resource_id=resource.id, testdata_path=testdata_path, - project_factory_v2=project_factory_v2, - dataset_factory_v2=dataset_factory_v2, - workflow_factory_v2=workflow_factory_v2, - task_factory_v2=task_factory_v2, + project_factory_v2=project_factory, + dataset_factory_v2=dataset_factory, + workflow_factory_v2=workflow_factory, + task_factory_v2=task_factory, tmp777_path=tmp777_path, this_should_fail=True, ) diff --git a/tests/v2/test_07_full_workflow/test_full_workflow_slurm_sudo.py b/tests/v2/test_07_full_workflow/test_full_workflow_slurm_sudo.py index 629fc6f7ea..1a961bbdd8 100644 --- a/tests/v2/test_07_full_workflow/test_full_workflow_slurm_sudo.py +++ b/tests/v2/test_07_full_workflow/test_full_workflow_slurm_sudo.py @@ -44,13 +44,13 @@ async def test_full_workflow_slurm( client, MockCurrentUser, tmp777_path, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, + project_factory, + dataset_factory, + workflow_factory, override_settings_factory, fractal_tasks_mock_db, slurm_sudo_resource_profile_db, - relink_python_interpreter_v2, # before 'monkey_slurm' (#1462) + relink_python_interpreter, # before 'monkey_slurm' (#1462) monkey_slurm, ): override_settings_factory(FRACTAL_RUNNER_BACKEND=FRACTAL_RUNNER_BACKEND) @@ -74,9 +74,9 @@ async def test_full_workflow_slurm( project_dirs=[project_dir], ), resource_id=resource.id, - project_factory_v2=project_factory_v2, - dataset_factory_v2=dataset_factory_v2, - workflow_factory_v2=workflow_factory_v2, + project_factory_v2=project_factory, + dataset_factory_v2=dataset_factory, + 
workflow_factory_v2=workflow_factory, client=client, tasks=fractal_tasks_mock_db, ) @@ -97,14 +97,14 @@ async def test_full_workflow_TaskExecutionError_slurm( client, MockCurrentUser, tmp777_path, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, + project_factory, + dataset_factory, + workflow_factory, override_settings_factory, fractal_tasks_mock_db, slurm_sudo_resource_profile_db, db, - relink_python_interpreter_v2, # before 'monkey_slurm' (#1462) + relink_python_interpreter, # before 'monkey_slurm' (#1462) monkey_slurm, ): - """ " + """ @@ -131,9 +131,9 @@ async def test_full_workflow_TaskExecutionError_slurm( project_dirs=[project_dir], ), resource_id=resource.id, - project_factory_v2=project_factory_v2, - dataset_factory_v2=dataset_factory_v2, - workflow_factory_v2=workflow_factory_v2, + project_factory_v2=project_factory, + dataset_factory_v2=dataset_factory, + workflow_factory_v2=workflow_factory, client=client, tasks=fractal_tasks_mock_db, ) @@ -147,13 +147,13 @@ async def test_non_executable_task_command_slurm( MockCurrentUser, testdata_path, tmp777_path, - task_factory_v2, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, + task_factory, + project_factory, + dataset_factory, + workflow_factory, override_settings_factory, slurm_sudo_resource_profile_db, - relink_python_interpreter_v2, # before 'monkey_slurm' (#1462) + relink_python_interpreter, # before 'monkey_slurm' (#1462) monkey_slurm, ): """ @@ -175,10 +175,10 @@ async def test_non_executable_task_command_slurm( resource_id=resource.id, client=client, testdata_path=testdata_path, - project_factory_v2=project_factory_v2, - workflow_factory_v2=workflow_factory_v2, - dataset_factory_v2=dataset_factory_v2, - task_factory_v2=task_factory_v2, + project_factory_v2=project_factory, + workflow_factory_v2=workflow_factory, + dataset_factory_v2=dataset_factory, + task_factory_v2=task_factory, ) _reset_permissions_for_user_folder(project_dir) @@ -189,15 +189,15 @@ async def test_failing_workflow_UnknownError_slurm( client, MockCurrentUser, tmp777_path, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - task_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + task_factory, request, override_settings_factory, slurm_sudo_resource_profile_db, monkeypatch, - relink_python_interpreter_v2, # before 'monkey_slurm' (#1462) + relink_python_interpreter, # before 'monkey_slurm' (#1462) monkey_slurm, ): """ @@ -219,10 +219,10 @@ async def test_failing_workflow_UnknownError_slurm( resource_id=resource.id, client=client, monkeypatch=monkeypatch, - project_factory_v2=project_factory_v2, - dataset_factory_v2=dataset_factory_v2, - workflow_factory_v2=workflow_factory_v2, - task_factory_v2=task_factory_v2, + project_factory_v2=project_factory, + dataset_factory_v2=dataset_factory, + workflow_factory_v2=workflow_factory, + task_factory_v2=task_factory, ) _reset_permissions_for_user_folder(project_dir) diff --git a/tests/v2/test_08_backends/aux_unit_runner.py b/tests/v2/test_08_backends/aux_unit_runner.py index 675412c7ac..2797129cfb 100644 --- a/tests/v2/test_08_backends/aux_unit_runner.py +++ b/tests/v2/test_08_backends/aux_unit_runner.py @@ -22,23 +22,23 @@ async def history_run_mock( db, MockCurrentUser, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - workflowtask_factory_v2, - task_factory_v2, - job_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + workflowtask_factory, + task_factory, + job_factory, tmp_path, ) -> HistoryRun: async with
MockCurrentUser() as user: - project = await project_factory_v2(user) - dataset = await dataset_factory_v2(project_id=project.id) - workflow = await workflow_factory_v2(project_id=project.id) - task = await task_factory_v2(user_id=user.id) - wftask = await workflowtask_factory_v2( + project = await project_factory(user) + dataset = await dataset_factory(project_id=project.id) + workflow = await workflow_factory(project_id=project.id) + task = await task_factory(user_id=user.id) + wftask = await workflowtask_factory( workflow_id=workflow.id, task_id=task.id ) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, diff --git a/tests/v2/test_09_history/test_history_api.py b/tests/v2/test_09_history/test_history_api.py index 95dcdb5bfd..8e18c54c01 100644 --- a/tests/v2/test_09_history/test_history_api.py +++ b/tests/v2/test_09_history/test_history_api.py @@ -15,27 +15,27 @@ async def test_status_api( - project_factory_v2, - workflow_factory_v2, - task_factory_v2, - dataset_factory_v2, - workflowtask_factory_v2, - job_factory_v2, + project_factory, + workflow_factory, + task_factory, + dataset_factory, + workflowtask_factory, + job_factory, db, client, MockCurrentUser, ): async with MockCurrentUser() as user: - project = await project_factory_v2(user) - dataset = await dataset_factory_v2(project_id=project.id) - workflow = await workflow_factory_v2(project_id=project.id) - task = await task_factory_v2(user_id=user.id) + project = await project_factory(user) + dataset = await dataset_factory(project_id=project.id) + workflow = await workflow_factory(project_id=project.id) + task = await task_factory(user_id=user.id) # WorkflowTask 1 (one run, four units, different statuses) - wftask1 = await workflowtask_factory_v2( + wftask1 = await workflowtask_factory( workflow_id=workflow.id, task_id=task.id ) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, @@ -107,7 +107,7 @@ async def test_status_api( ) await db.commit() - wftask2 = await workflowtask_factory_v2( + wftask2 = await workflowtask_factory( workflow_id=workflow.id, task_id=task.id ) @@ -160,12 +160,12 @@ async def test_status_api( ], ) async def test_cascade_delete( - project_factory_v2, - workflow_factory_v2, - task_factory_v2, - dataset_factory_v2, - workflowtask_factory_v2, - job_factory_v2, + project_factory, + workflow_factory, + task_factory, + dataset_factory, + workflowtask_factory, + job_factory, db, client, MockCurrentUser, @@ -177,14 +177,14 @@ async def test_cascade_delete( ], ): async with MockCurrentUser() as user: - project = await project_factory_v2(user) - dataset = await dataset_factory_v2(project_id=project.id) - workflow = await workflow_factory_v2(project_id=project.id) - task = await task_factory_v2(user_id=user.id) - wftask = await workflowtask_factory_v2( + project = await project_factory(user) + dataset = await dataset_factory(project_id=project.id) + workflow = await workflow_factory(project_id=project.id) + task = await task_factory(user_id=user.id) + wftask = await workflowtask_factory( workflow_id=workflow.id, task_id=task.id ) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, @@ -274,12 +274,12 @@ async def test_cascade_delete( async def test_get_history_run_list( - project_factory_v2, - workflow_factory_v2, - task_factory_v2, - dataset_factory_v2, - workflowtask_factory_v2, - 
job_factory_v2, + project_factory, + workflow_factory, + task_factory, + dataset_factory, + workflowtask_factory, + job_factory, db, client, MockCurrentUser, @@ -288,21 +288,21 @@ async def test_get_history_run_list( timestamp = datetime.now(tz=local_tz) async with MockCurrentUser() as user: - project = await project_factory_v2(user) - dataset = await dataset_factory_v2(project_id=project.id) - workflow = await workflow_factory_v2(project_id=project.id) - task1 = await task_factory_v2(user_id=user.id, version="3.1.4") - task2 = await task_factory_v2( + project = await project_factory(user) + dataset = await dataset_factory(project_id=project.id) + workflow = await workflow_factory(project_id=project.id) + task1 = await task_factory(user_id=user.id, version="3.1.4") + task2 = await task_factory( user_id=user.id, args_schema_parallel={"foo": "bar"}, version="1.2" ) debug(task1, task2) - wftask1 = await workflowtask_factory_v2( + wftask1 = await workflowtask_factory( workflow_id=workflow.id, task_id=task1.id ) - wftask2 = await workflowtask_factory_v2( + wftask2 = await workflowtask_factory( workflow_id=workflow.id, task_id=task2.id ) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, @@ -442,25 +442,25 @@ def add_units(hr_id: int, quantity: int, status: HistoryUnitStatus): async def test_get_history_run_units( - project_factory_v2, - workflow_factory_v2, - task_factory_v2, - dataset_factory_v2, - workflowtask_factory_v2, - job_factory_v2, + project_factory, + workflow_factory, + task_factory, + dataset_factory, + workflowtask_factory, + job_factory, db, client, MockCurrentUser, ): async with MockCurrentUser() as user: - project = await project_factory_v2(user) - dataset = await dataset_factory_v2(project_id=project.id) - workflow = await workflow_factory_v2(project_id=project.id) - task = await task_factory_v2(user_id=user.id) - wftask = await workflowtask_factory_v2( + project = await project_factory(user) + dataset = await dataset_factory(project_id=project.id) + workflow = await workflow_factory(project_id=project.id) + task = await task_factory(user_id=user.id) + wftask = await workflowtask_factory( workflow_id=workflow.id, task_id=task.id ) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, @@ -506,7 +506,7 @@ async def test_get_history_run_units( assert res.status_code == 404 # 422 - wftask2 = await workflowtask_factory_v2( + wftask2 = await workflowtask_factory( workflow_id=workflow.id, task_id=task.id ) res = await client.get( @@ -514,7 +514,7 @@ async def test_get_history_run_units( f"?workflowtask_id={wftask2.id}&dataset_id={dataset.id}" ) assert res.status_code == 422 - dataset2 = await dataset_factory_v2(project_id=project.id) + dataset2 = await dataset_factory(project_id=project.id) res = await client.get( f"/api/v2/project/{project.id}/status/run/{hr.id}/units/" f"?workflowtask_id={wftask.id}&dataset_id={dataset2.id}" @@ -562,18 +562,18 @@ async def test_get_history_run_units( async def test_get_history_images( - project_factory_v2, - workflow_factory_v2, - task_factory_v2, - dataset_factory_v2, - workflowtask_factory_v2, - job_factory_v2, + project_factory, + workflow_factory, + task_factory, + dataset_factory, + workflowtask_factory, + job_factory, db, client, MockCurrentUser, ): async with MockCurrentUser() as user: - project = await project_factory_v2(user) + project = await project_factory(user) images_x_no_y = [ 
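+            # First group: images typed `x` but not `y`; together with + # `images_x_and_y` below, these exercise the task's + # `input_types={"y": True}` against the `type_filters={"x": True}`.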
SingleImage( @@ -599,18 +599,18 @@ async def test_get_history_images( ] dataset_images = images_x_no_y + images_x_and_y - dataset = await dataset_factory_v2( + dataset = await dataset_factory( project_id=project.id, images=dataset_images ) - workflow = await workflow_factory_v2(project_id=project.id) - task = await task_factory_v2(user_id=user.id, input_types={"y": True}) + workflow = await workflow_factory(project_id=project.id) + task = await task_factory(user_id=user.id, input_types={"y": True}) - wftask = await workflowtask_factory_v2( + wftask = await workflowtask_factory( workflow_id=workflow.id, task_id=task.id, type_filters={"x": True}, ) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, @@ -764,12 +764,12 @@ async def test_get_history_images( async def test_get_logs( - project_factory_v2, - workflow_factory_v2, - task_factory_v2, - dataset_factory_v2, - workflowtask_factory_v2, - job_factory_v2, + project_factory, + workflow_factory, + task_factory, + dataset_factory, + workflowtask_factory, + job_factory, db, tmp_path, client, @@ -782,16 +782,14 @@ async def test_get_logs( f.write(LOGS) async with MockCurrentUser() as user: - proj = await project_factory_v2(user) - ds = await dataset_factory_v2( + proj = await project_factory(user) + ds = await dataset_factory( project_id=proj.id, images=[dict(zarr_url=ZARR_URL)] ) - wf = await workflow_factory_v2(project_id=proj.id) - task = await task_factory_v2(user_id=user.id) - wftask = await workflowtask_factory_v2( - workflow_id=wf.id, task_id=task.id - ) - job = await job_factory_v2( + wf = await workflow_factory(project_id=proj.id) + task = await task_factory(user_id=user.id) + wftask = await workflowtask_factory(workflow_id=wf.id, task_id=task.id) + job = await job_factory( project_id=proj.id, dataset_id=ds.id, workflow_id=wf.id, @@ -861,10 +859,8 @@ async def test_get_logs( ) assert res.status_code == 422 assert "Invalid query parameters: HistoryUnit" in res.json()["detail"] - ds2 = await dataset_factory_v2(project_id=proj.id) - wftask2 = await workflowtask_factory_v2( - workflow_id=wf.id, task_id=task.id - ) + ds2 = await dataset_factory(project_id=proj.id) + wftask2 = await workflowtask_factory(workflow_id=wf.id, task_id=task.id) res = await client.get( f"/api/v2/project/{proj.id}/status/unit-log/" f"?workflowtask_id={wftask2.id}&dataset_id={ds.id}" @@ -882,24 +878,24 @@ async def test_get_logs( async def test_get_history_run_dataset( - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - task_factory_v2, - workflowtask_factory_v2, - job_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + task_factory, + workflowtask_factory, + job_factory, db, client, MockCurrentUser, ): async with MockCurrentUser() as user: - project = await project_factory_v2(user) + project = await project_factory(user) - dataset = await dataset_factory_v2(project_id=project.id) - wf = await workflow_factory_v2(project_id=project.id) - task = await task_factory_v2(user_id=user.id) - await workflowtask_factory_v2(workflow_id=wf.id, task_id=task.id) - job = await job_factory_v2( + dataset = await dataset_factory(project_id=project.id) + wf = await workflow_factory(project_id=project.id) + task = await task_factory(user_id=user.id) + await workflowtask_factory(workflow_id=wf.id, task_id=task.id) + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=wf.id, diff --git a/tests/v2/test_09_history/test_unit_api_aux_functions.py 
b/tests/v2/test_09_history/test_unit_api_aux_functions.py index 3b7e224d05..551b3b33fb 100644 --- a/tests/v2/test_09_history/test_unit_api_aux_functions.py +++ b/tests/v2/test_09_history/test_unit_api_aux_functions.py @@ -95,15 +95,15 @@ class MockWorkflowTask(BaseModel): async def test_verify_workflow_and_dataset_access( db, - project_factory_v2, - workflow_factory_v2, - dataset_factory_v2, + project_factory, + workflow_factory, + dataset_factory, MockCurrentUser, ): async with MockCurrentUser() as user: - project1 = await project_factory_v2(user=user) - wf1 = await workflow_factory_v2(project_id=project1.id) - ds1 = await dataset_factory_v2(project_id=project1.id) + project1 = await project_factory(user=user) + wf1 = await workflow_factory(project_id=project1.id) + ds1 = await dataset_factory(project_id=project1.id) res = await _verify_workflow_and_dataset_access( project_id=project1.id, @@ -116,9 +116,9 @@ async def test_verify_workflow_and_dataset_access( assert res["dataset"].id == ds1.id assert res["workflow"].id == wf1.id - project2 = await project_factory_v2(user=user) - wf2 = await workflow_factory_v2(project_id=project2.id) - ds2 = await dataset_factory_v2(project_id=project2.id) + project2 = await project_factory(user=user) + wf2 = await workflow_factory(project_id=project2.id) + ds2 = await dataset_factory(project_id=project2.id) with pytest.raises(HTTPException, match="Workflow does not belong"): await _verify_workflow_and_dataset_access( diff --git a/tests/v2/test_09_history/test_unit_upsert.py b/tests/v2/test_09_history/test_unit_upsert.py index 54680d32ff..9398f7db7d 100644 --- a/tests/v2/test_09_history/test_unit_upsert.py +++ b/tests/v2/test_09_history/test_unit_upsert.py @@ -30,12 +30,12 @@ def bulk_upsert_image_cache_slow( ], ) async def test_upsert_function( - project_factory_v2, - workflow_factory_v2, - task_factory_v2, - dataset_factory_v2, - workflowtask_factory_v2, - job_factory_v2, + project_factory, + workflow_factory, + task_factory, + dataset_factory, + workflowtask_factory, + job_factory, db_sync, db, MockCurrentUser, @@ -43,15 +43,15 @@ async def test_upsert_function( num, ): async with MockCurrentUser() as user: - project = await project_factory_v2(user) - dataset = await dataset_factory_v2(project_id=project.id) - workflow = await workflow_factory_v2(project_id=project.id) - task = await task_factory_v2(user_id=user.id) + project = await project_factory(user) + dataset = await dataset_factory(project_id=project.id) + workflow = await workflow_factory(project_id=project.id) + task = await task_factory(user_id=user.id) - wftask = await workflowtask_factory_v2( + wftask = await workflowtask_factory( workflow_id=workflow.id, task_id=task.id ) - job = await job_factory_v2( + job = await job_factory( project_id=project.id, dataset_id=dataset.id, workflow_id=workflow.id, From 3a272c131b2cf69af2a9a1d5e30327278aa23489 Mon Sep 17 00:00:00 2001 From: Yuri Chiucconi Date: Fri, 28 Nov 2025 16:15:07 +0100 Subject: [PATCH 05/13] fix test not api --- fractal_server/app/schemas/v2/manifest.py | 26 +++++++++---------- .../test_01_schemas/test_schemas_manifest.py | 10 +++---- 2 files changed, 18 insertions(+), 18 deletions(-) diff --git a/fractal_server/app/schemas/v2/manifest.py b/fractal_server/app/schemas/v2/manifest.py index 6abfedb2c8..d079693f77 100644 --- a/fractal_server/app/schemas/v2/manifest.py +++ b/fractal_server/app/schemas/v2/manifest.py @@ -13,7 +13,7 @@ class TaskManifestV2(BaseModel): """ - Represents a task within a manifest. 
+ Represents a task within a V2 manifest. Attributes: name: @@ -65,24 +65,24 @@ def validate_executable_args_meta(self): executable_parallel = self.executable_parallel if (executable_non_parallel is None) and (executable_parallel is None): raise ValueError( - "`TaskManifest.executable_non_parallel` and " - "`TaskManifest.executable_parallel` cannot be both None." + "`TaskManifestV2.executable_non_parallel` and " + "`TaskManifestV2.executable_parallel` cannot be both None." ) elif executable_non_parallel is None: meta_non_parallel = self.meta_non_parallel if meta_non_parallel != {}: raise ValueError( - "`TaskManifest.meta_non_parallel` must be an empty dict " - "if `TaskManifest.executable_non_parallel` is None. " + "`TaskManifestV2.meta_non_parallel` must be an empty dict " + "if `TaskManifestV2.executable_non_parallel` is None. " f"Given: {meta_non_parallel}." ) args_schema_non_parallel = self.args_schema_non_parallel if args_schema_non_parallel is not None: raise ValueError( - "`TaskManifest.args_schema_non_parallel` must be None " - "if `TaskManifest.executable_non_parallel` is None. " + "`TaskManifestV2.args_schema_non_parallel` must be None " + "if `TaskManifestV2.executable_non_parallel` is None. " f"Given: {args_schema_non_parallel}." ) @@ -90,16 +90,16 @@ def validate_executable_args_meta(self): meta_parallel = self.meta_parallel if meta_parallel != {}: raise ValueError( - "`TaskManifest.meta_parallel` must be an empty dict if " - "`TaskManifest.executable_parallel` is None. " + "`TaskManifestV2.meta_parallel` must be an empty dict if " + "`TaskManifestV2.executable_parallel` is None. " f"Given: {meta_parallel}." ) args_schema_parallel = self.args_schema_parallel if args_schema_parallel is not None: raise ValueError( - "`TaskManifest.args_schema_parallel` must be None if " - "`TaskManifest.executable_parallel` is None. " + "`TaskManifestV2.args_schema_parallel` must be None if " + "`TaskManifestV2.executable_parallel` is None. " f"Given: {args_schema_parallel}." ) @@ -145,14 +145,14 @@ def _check_args_schemas_are_present(self): if task.executable_parallel is not None: if task.args_schema_parallel is None: raise ValueError( - f"Manifest has {has_args_schemas=}, but " + f"ManifestV2 has {has_args_schemas=}, but " f"task '{task.name}' has " f"{task.args_schema_parallel=}." ) if task.executable_non_parallel is not None: if task.args_schema_non_parallel is None: raise ValueError( - f"Manifest has {has_args_schemas=}, but " + f"ManifestV2 has {has_args_schemas=}, but " f"task '{task.name}' has " f"{task.args_schema_non_parallel=}." 
) diff --git a/tests/v2/test_01_schemas/test_schemas_manifest.py b/tests/v2/test_01_schemas/test_schemas_manifest.py index aaa2b6f3a9..9bb8ab19e0 100644 --- a/tests/v2/test_01_schemas/test_schemas_manifest.py +++ b/tests/v2/test_01_schemas/test_schemas_manifest.py @@ -145,7 +145,7 @@ def test_ManifestV2(): non_parallel_schema, ], ) - assert "Manifest has has_args_schemas=True" in msg(e) + assert "ManifestV2 has has_args_schemas=True" in msg(e) # 3: compound_just_parallel_schemas with pytest.raises(ValidationError) as e: @@ -158,7 +158,7 @@ def test_ManifestV2(): non_parallel_schema, ], ) - assert "Manifest has has_args_schemas=True" in msg(e) + assert "ManifestV2 has has_args_schemas=True" in msg(e) # 4: compound_no_schemas with pytest.raises(ValidationError) as e: @@ -171,7 +171,7 @@ def test_ManifestV2(): non_parallel_schema, ], ) - assert "Manifest has has_args_schemas=True" in msg(e) + assert "ManifestV2 has has_args_schemas=True" in msg(e) # 5: parallel_no_schema with pytest.raises(ValidationError) as e: @@ -184,7 +184,7 @@ def test_ManifestV2(): non_parallel_schema, ], ) - assert "Manifest has has_args_schemas=True" in msg(e) + assert "ManifestV2 has has_args_schemas=True" in msg(e) # 6: non_parallel_no_schema with pytest.raises(ValidationError) as e: @@ -197,7 +197,7 @@ def test_ManifestV2(): non_parallel_no_schema, ], ) - assert "Manifest has has_args_schemas=True" in msg(e) + assert "ManifestV2 has has_args_schemas=True" in msg(e) # 7: Non-unique task names with pytest.raises(ValidationError) as e: From 41a89577063dcd406ea6cc23c2fb0c6bb34d3d37 Mon Sep 17 00:00:00 2001 From: Yuri Chiucconi Date: Fri, 28 Nov 2025 16:18:52 +0100 Subject: [PATCH 06/13] fix another test not api --- tests/v2/test_07_full_workflow/common_functions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/v2/test_07_full_workflow/common_functions.py b/tests/v2/test_07_full_workflow/common_functions.py index dda94e34ec..f128a09d69 100644 --- a/tests/v2/test_07_full_workflow/common_functions.py +++ b/tests/v2/test_07_full_workflow/common_functions.py @@ -539,7 +539,7 @@ def _raise_RuntimeError(*args, **kwargs): monkeypatch.setattr( fractal_server.runner.v2.runner, - "run_v2_task_non_parallel", + "run_task_non_parallel", _raise_RuntimeError, ) From 850fbca94df569df212765410554e5b7a186d49f Mon Sep 17 00:00:00 2001 From: Yuri Chiucconi Date: Fri, 28 Nov 2025 16:39:38 +0100 Subject: [PATCH 07/13] fixture names leftovers --- fractal_server/runner/v2/runner_functions.py | 13 ++- .../test_07_full_workflow/common_functions.py | 88 +++++++++---------- .../test_full_workflow_local.py | 42 ++++----- .../test_full_workflow_slurm_ssh.py | 16 ++-- .../test_full_workflow_slurm_sudo.py | 28 +++--- 5 files changed, 90 insertions(+), 97 deletions(-) diff --git a/fractal_server/runner/v2/runner_functions.py b/fractal_server/runner/v2/runner_functions.py index 8d5f41ca19..8a5e4594ed 100644 --- a/fractal_server/runner/v2/runner_functions.py +++ b/fractal_server/runner/v2/runner_functions.py @@ -168,9 +168,7 @@ def run_task_non_parallel( TaskType.NON_PARALLEL, TaskType.CONVERTER_NON_PARALLEL, ]: - raise ValueError( - f"Invalid {task_type=} for `run_v2_task_non_parallel`." 
- ) + raise ValueError(f"Invalid {task_type=} for `run_task_non_parallel`.") # Get TaskFiles object task_files = TaskFiles( @@ -213,7 +211,7 @@ def run_task_non_parallel( db.commit() db.refresh(history_unit) logger.debug( - "[run_v2_task_non_parallel] Created `HistoryUnit` with " + "[run_task_non_parallel] Created `HistoryUnit` with " f"{history_run_id=}." ) history_unit_id = history_unit.id @@ -326,7 +324,7 @@ def run_task_parallel( db.add_all(history_units) db.commit() logger.debug( - f"[run_v2_task_non_parallel] Created {len(history_units)} " + f"[run_task_non_parallel] Created {len(history_units)} " "`HistoryUnit`s." ) @@ -445,7 +443,7 @@ def run_task_compound( db.refresh(history_unit) init_history_unit_id = history_unit.id logger.debug( - "[run_v2_task_compound] Created `HistoryUnit` with " + "[run_task_compound] Created `HistoryUnit` with " f"{init_history_unit_id=}." ) # Create one `HistoryImageCache` for each input image @@ -557,8 +555,7 @@ def run_task_compound( for history_unit in history_units: db.refresh(history_unit) logger.debug( - f"[run_v2_task_compound] Created {len(history_units)} " - "`HistoryUnit`s." + f"[run_task_compound] Created {len(history_units)} `HistoryUnit`s." ) history_unit_ids = [history_unit.id for history_unit in history_units] diff --git a/tests/v2/test_07_full_workflow/common_functions.py b/tests/v2/test_07_full_workflow/common_functions.py index f128a09d69..6c0d3474f8 100644 --- a/tests/v2/test_07_full_workflow/common_functions.py +++ b/tests/v2/test_07_full_workflow/common_functions.py @@ -29,9 +29,9 @@ async def full_workflow( *, MockCurrentUser, client, - project_factory_v2, - workflow_factory_v2, - dataset_factory_v2, + project_factory, + workflow_factory, + dataset_factory, tasks: dict[str, TaskV2], resource_id: int, user_kwargs: dict | None = None, @@ -42,14 +42,14 @@ async def full_workflow( async with MockCurrentUser( user_kwargs={"is_verified": True, **user_kwargs} ) as user: - project = await project_factory_v2(user) + project = await project_factory(user) project_id = project.id - dataset = await dataset_factory_v2( + dataset = await dataset_factory( project_id=project_id, name="dataset", ) dataset_id = dataset.id - workflow = await workflow_factory_v2( + workflow = await workflow_factory( project_id=project_id, name="workflow" ) workflow_id = workflow.id @@ -307,9 +307,9 @@ async def full_workflow_TaskExecutionError( *, MockCurrentUser, client, - project_factory_v2, - workflow_factory_v2, - dataset_factory_v2, + project_factory, + workflow_factory, + dataset_factory, tasks: dict[str, TaskV2], resource_id: int, user_kwargs: dict | None = None, @@ -321,14 +321,14 @@ async def full_workflow_TaskExecutionError( async with MockCurrentUser( user_kwargs={"is_verified": True, **user_kwargs} ) as user: - project = await project_factory_v2(user) + project = await project_factory(user) project_id = project.id - dataset = await dataset_factory_v2( + dataset = await dataset_factory( project_id=project_id, name="dataset", ) dataset_id = dataset.id - workflow = await workflow_factory_v2( + workflow = await workflow_factory( project_id=project_id, name="workflow" ) workflow_id = workflow.id @@ -411,10 +411,10 @@ async def non_executable_task_command( MockCurrentUser, client, testdata_path, - project_factory_v2, - workflow_factory_v2, - dataset_factory_v2, - task_factory_v2, + project_factory, + workflow_factory, + dataset_factory, + task_factory, resource_id: int, user_kwargs: dict | None = None, ): @@ -425,7 +425,7 @@ async def non_executable_task_command( 
user_kwargs={"is_verified": True, **user_kwargs}, ) as user: # Create task - task = await task_factory_v2( + task = await task_factory( user_id=user.id, name="invalid-task-command", type="non_parallel", @@ -434,13 +434,11 @@ async def non_executable_task_command( debug(task) # Create project - project = await project_factory_v2(user) + project = await project_factory(user) project_id = project.id # Create workflow - workflow = await workflow_factory_v2( - name="test_wf", project_id=project_id - ) + workflow = await workflow_factory(name="test_wf", project_id=project_id) # Add task to workflow res = await client.post( @@ -452,7 +450,7 @@ async def non_executable_task_command( assert res.status_code == 201 # Create dataset - dataset = await dataset_factory_v2( + dataset = await dataset_factory( project_id=project_id, name="input", zarr_dir="/fake", @@ -485,10 +483,10 @@ async def failing_workflow_UnknownError( MockCurrentUser, client, monkeypatch, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - task_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + task_factory, resource_id: int, user_kwargs: dict | None = None, ): @@ -499,22 +497,22 @@ async def failing_workflow_UnknownError( async with MockCurrentUser( user_kwargs={"is_verified": True, **user_kwargs} ) as user: - project = await project_factory_v2(user) + project = await project_factory(user) project_id = project.id - dataset = await dataset_factory_v2( + dataset = await dataset_factory( project_id=project_id, name="dataset", zarr_dir="/fake", images=[dict(zarr_url="/fake/1")], ) dataset_id = dataset.id - workflow = await workflow_factory_v2( + workflow = await workflow_factory( project_id=project_id, name="workflow" ) workflow_id = workflow.id # Create task - task = await task_factory_v2( + task = await task_factory( user_id=user.id, command_non_parallel="echo", type="non_parallel", @@ -590,10 +588,10 @@ async def workflow_with_non_python_task( MockCurrentUser, client, testdata_path, - project_factory_v2, - dataset_factory_v2, - workflow_factory_v2, - task_factory_v2, + project_factory, + dataset_factory, + workflow_factory, + task_factory, tmp777_path: Path, resource_id: int, additional_user_kwargs=None, @@ -613,13 +611,11 @@ async def workflow_with_non_python_task( async with MockCurrentUser(user_kwargs=user_kwargs) as user: # Create project - project = await project_factory_v2(user) + project = await project_factory(user) project_id = project.id # Create workflow - workflow = await workflow_factory_v2( - name="test_wf", project_id=project_id - ) + workflow = await workflow_factory(name="test_wf", project_id=project_id) # Copy script somewhere accessible script_name = "non_python_task_issue1377.sh" @@ -630,7 +626,7 @@ async def workflow_with_non_python_task( ) # Create task - task = await task_factory_v2( + task = await task_factory( user_id=user.id, name="non-python", type="non_parallel", @@ -646,7 +642,7 @@ async def workflow_with_non_python_task( assert res.status_code == 201 # Create datasets - dataset = await dataset_factory_v2( + dataset = await dataset_factory( project_id=project_id, name="dataset", zarr_dir="/fake", @@ -702,9 +698,9 @@ async def failing_workflow_post_task_execution( *, MockCurrentUser, client, - project_factory_v2, - workflow_factory_v2, - dataset_factory_v2, + project_factory, + workflow_factory, + dataset_factory, tasks: dict[str, TaskV2], resource_id: int, user_kwargs: dict | None = None, @@ -716,7 +712,7 @@ async def failing_workflow_post_task_execution( async with 
MockCurrentUser( user_kwargs={"is_verified": True, **user_kwargs}, ) as user: - project = await project_factory_v2( + project = await project_factory( user, resource_id=resource_id, ) @@ -724,7 +720,7 @@ async def failing_workflow_post_task_execution( zarr_dir = (tmp_path / "zarr_dir").as_posix().rstrip("/") - dataset = await dataset_factory_v2( + dataset = await dataset_factory( project_id=project_id, name="dataset", zarr_dir=zarr_dir, @@ -734,7 +730,7 @@ async def failing_workflow_post_task_execution( ], ) dataset_id = dataset.id - workflow = await workflow_factory_v2( + workflow = await workflow_factory( project_id=project_id, name="workflow" ) workflow_id = workflow.id diff --git a/tests/v2/test_07_full_workflow/test_full_workflow_local.py b/tests/v2/test_07_full_workflow/test_full_workflow_local.py index e7206a0ce5..bbea429558 100644 --- a/tests/v2/test_07_full_workflow/test_full_workflow_local.py +++ b/tests/v2/test_07_full_workflow/test_full_workflow_local.py @@ -33,9 +33,9 @@ async def test_full_workflow_local( resource, profile = local_resource_profile_db await full_workflow( MockCurrentUser=MockCurrentUser, - project_factory_v2=project_factory, - dataset_factory_v2=dataset_factory, - workflow_factory_v2=workflow_factory, + project_factory=project_factory, + dataset_factory=dataset_factory, + workflow_factory=workflow_factory, client=client, tasks=fractal_tasks_mock_db, user_kwargs=dict( @@ -64,9 +64,9 @@ async def test_full_workflow_TaskExecutionError( resource, profile = local_resource_profile_db await full_workflow_TaskExecutionError( MockCurrentUser=MockCurrentUser, - project_factory_v2=project_factory, - dataset_factory_v2=dataset_factory, - workflow_factory_v2=workflow_factory, + project_factory=project_factory, + dataset_factory=dataset_factory, + workflow_factory=workflow_factory, client=client, tasks=fractal_tasks_mock_db, user_kwargs=dict(profile_id=profile.id), @@ -95,10 +95,10 @@ async def test_non_executable_task_command_local( MockCurrentUser=MockCurrentUser, client=client, testdata_path=testdata_path, - project_factory_v2=project_factory, - workflow_factory_v2=workflow_factory, - dataset_factory_v2=dataset_factory, - task_factory_v2=task_factory, + project_factory=project_factory, + workflow_factory=workflow_factory, + dataset_factory=dataset_factory, + task_factory=task_factory, user_kwargs=dict( profile_id=profile.id, ), @@ -128,10 +128,10 @@ async def test_failing_workflow_UnknownError_local( MockCurrentUser=MockCurrentUser, client=client, monkeypatch=monkeypatch, - project_factory_v2=project_factory, - dataset_factory_v2=dataset_factory, - workflow_factory_v2=workflow_factory, - task_factory_v2=task_factory, + project_factory=project_factory, + dataset_factory=dataset_factory, + workflow_factory=workflow_factory, + task_factory=task_factory, user_kwargs=dict(profile_id=profile.id), resource_id=resource.id, ) @@ -161,10 +161,10 @@ async def test_non_python_task_local( await workflow_with_non_python_task( client=client, MockCurrentUser=MockCurrentUser, - project_factory_v2=project_factory, - dataset_factory_v2=dataset_factory, - workflow_factory_v2=workflow_factory, - task_factory_v2=task_factory, + project_factory=project_factory, + dataset_factory=dataset_factory, + workflow_factory=workflow_factory, + task_factory=task_factory, testdata_path=testdata_path, tmp777_path=tmp777_path, additional_user_kwargs=dict(profile_id=profile.id), @@ -188,9 +188,9 @@ async def test_failing_workflow_post_task_execution( await failing_workflow_post_task_execution( 
MockCurrentUser=MockCurrentUser, - project_factory_v2=project_factory, - dataset_factory_v2=dataset_factory, - workflow_factory_v2=workflow_factory, + project_factory=project_factory, + dataset_factory=dataset_factory, + workflow_factory=workflow_factory, client=client, tasks=fractal_tasks_mock_db, tmp_path=tmp_path, diff --git a/tests/v2/test_07_full_workflow/test_full_workflow_slurm_ssh.py b/tests/v2/test_07_full_workflow/test_full_workflow_slurm_ssh.py index 117ea0c834..6a567a5b34 100644 --- a/tests/v2/test_07_full_workflow/test_full_workflow_slurm_ssh.py +++ b/tests/v2/test_07_full_workflow/test_full_workflow_slurm_ssh.py @@ -36,10 +36,10 @@ async def test_workflow_with_non_python_task_slurm_ssh( resource_id=resource.id, client=client, testdata_path=testdata_path, - project_factory_v2=project_factory, - dataset_factory_v2=dataset_factory, - workflow_factory_v2=workflow_factory, - task_factory_v2=task_factory, + project_factory=project_factory, + dataset_factory=dataset_factory, + workflow_factory=workflow_factory, + task_factory=task_factory, tmp777_path=tmp777_path, ) @@ -86,10 +86,10 @@ async def test_workflow_with_non_python_task_slurm_ssh_fail( ), resource_id=resource.id, testdata_path=testdata_path, - project_factory_v2=project_factory, - dataset_factory_v2=dataset_factory, - workflow_factory_v2=workflow_factory, - task_factory_v2=task_factory, + project_factory=project_factory, + dataset_factory=dataset_factory, + workflow_factory=workflow_factory, + task_factory=task_factory, tmp777_path=tmp777_path, this_should_fail=True, ) diff --git a/tests/v2/test_07_full_workflow/test_full_workflow_slurm_sudo.py b/tests/v2/test_07_full_workflow/test_full_workflow_slurm_sudo.py index 1a961bbdd8..8a6d5059a6 100644 --- a/tests/v2/test_07_full_workflow/test_full_workflow_slurm_sudo.py +++ b/tests/v2/test_07_full_workflow/test_full_workflow_slurm_sudo.py @@ -74,9 +74,9 @@ async def test_full_workflow_slurm( project_dirs=[project_dir], ), resource_id=resource.id, - project_factory_v2=project_factory, - dataset_factory_v2=dataset_factory, - workflow_factory_v2=workflow_factory, + project_factory=project_factory, + dataset_factory=dataset_factory, + workflow_factory=workflow_factory, client=client, tasks=fractal_tasks_mock_db, ) @@ -131,9 +131,9 @@ async def test_full_workflow_TaskExecutionError_slurm( project_dirs=[project_dir], ), resource_id=resource.id, - project_factory_v2=project_factory, - dataset_factory_v2=dataset_factory, - workflow_factory_v2=workflow_factory, + project_factory=project_factory, + dataset_factory=dataset_factory, + workflow_factory=workflow_factory, client=client, tasks=fractal_tasks_mock_db, ) @@ -175,10 +175,10 @@ async def test_non_executable_task_command_slurm( resource_id=resource.id, client=client, testdata_path=testdata_path, - project_factory_v2=project_factory, - workflow_factory_v2=workflow_factory, - dataset_factory_v2=dataset_factory, - task_factory_v2=task_factory, + project_factory=project_factory, + workflow_factory=workflow_factory, + dataset_factory=dataset_factory, + task_factory=task_factory, ) _reset_permissions_for_user_folder(project_dir) @@ -219,10 +219,10 @@ async def test_failing_workflow_UnknownError_slurm( resource_id=resource.id, client=client, monkeypatch=monkeypatch, - project_factory_v2=project_factory, - dataset_factory_v2=dataset_factory, - workflow_factory_v2=workflow_factory, - task_factory_v2=task_factory, + project_factory=project_factory, + dataset_factory=dataset_factory, + workflow_factory=workflow_factory, + 
task_factory=task_factory, ) _reset_permissions_for_user_folder(project_dir) From ace7225c89d4d90a4bdb6ab8d64705d1e1aa89ae Mon Sep 17 00:00:00 2001 From: Yuri Chiucconi Date: Fri, 28 Nov 2025 16:56:34 +0100 Subject: [PATCH 08/13] grep all '_v2' and 'jobsV2' --- .../app/routes/admin/v2/__init__.py | 22 ++--- fractal_server/app/routes/api/v2/__init__.py | 96 +++++++++---------- fractal_server/app/routes/api/v2/submit.py | 12 +-- fractal_server/app/shutdown.py | 6 +- fractal_server/config/_main.py | 2 +- fractal_server/main.py | 14 ++- scripts/client/client.py | 3 +- tests/conftest.py | 4 +- ...res_server_v2.py => fixtures_factories.py} | 0 tests/fixtures_server.py | 2 +- ...fixtures_tasks_v2.py => fixtures_tasks.py} | 0 tests/no_version/test_unit_lifespan.py | 14 +-- tests/v2/test_03_api/test_api_project.py | 2 +- .../test_submission_job_list_v2.py | 8 +- .../{execute_tasks_v2.py => execute_tasks.py} | 2 +- .../v2/test_04_runner/test_dummy_examples.py | 22 ++--- .../test_04_runner/test_fractal_examples.py | 2 +- .../test_no_images_parallelization.py | 2 +- .../test_collect_local.py | 16 ++-- .../test_collect_pixi_local.py | 8 +- .../test_deactivate_local.py | 18 ++-- .../test_deactivate_ssh.py | 18 ++-- .../test_reactivate_local.py | 8 +- .../test_reactivate_ssh.py | 8 +- 24 files changed, 141 insertions(+), 148 deletions(-) rename tests/{fixtures_server_v2.py => fixtures_factories.py} (100%) rename tests/{fixtures_tasks_v2.py => fixtures_tasks.py} (100%) rename tests/v2/test_04_runner/{execute_tasks_v2.py => execute_tasks.py} (91%) diff --git a/fractal_server/app/routes/admin/v2/__init__.py b/fractal_server/app/routes/admin/v2/__init__.py index 5b42be5486..a31a97e696 100644 --- a/fractal_server/app/routes/admin/v2/__init__.py +++ b/fractal_server/app/routes/admin/v2/__init__.py @@ -14,16 +14,14 @@ from .task_group import router as task_group_router from .task_group_lifecycle import router as task_group_lifecycle_router -router_admin_v2 = APIRouter() +router_admin = APIRouter() -router_admin_v2.include_router(accounting_router, prefix="/accounting") -router_admin_v2.include_router(job_router, prefix="/job") -router_admin_v2.include_router(task_router, prefix="/task") -router_admin_v2.include_router(task_group_router, prefix="/task-group") -router_admin_v2.include_router( - task_group_lifecycle_router, prefix="/task-group" -) -router_admin_v2.include_router(impersonate_router, prefix="/impersonate") -router_admin_v2.include_router(resource_router, prefix="/resource") -router_admin_v2.include_router(profile_router, prefix="/profile") -router_admin_v2.include_router(sharing_router, prefix="/linkuserproject") +router_admin.include_router(accounting_router, prefix="/accounting") +router_admin.include_router(job_router, prefix="/job") +router_admin.include_router(task_router, prefix="/task") +router_admin.include_router(task_group_router, prefix="/task-group") +router_admin.include_router(task_group_lifecycle_router, prefix="/task-group") +router_admin.include_router(impersonate_router, prefix="/impersonate") +router_admin.include_router(resource_router, prefix="/resource") +router_admin.include_router(profile_router, prefix="/profile") +router_admin.include_router(sharing_router, prefix="/linkuserproject") diff --git a/fractal_server/app/routes/api/v2/__init__.py b/fractal_server/app/routes/api/v2/__init__.py index a006487865..32bc790918 100644 --- a/fractal_server/app/routes/api/v2/__init__.py +++ b/fractal_server/app/routes/api/v2/__init__.py @@ -7,68 +7,66 @@ from fractal_server.config import 
get_settings from fractal_server.syringe import Inject -from .dataset import router as dataset_router_v2 -from .history import router as history_router_v2 -from .images import router as images_routes_v2 -from .job import router as job_router_v2 +from .dataset import router as dataset_router +from .history import router as history_router +from .images import router as images_routes +from .job import router as job_router from .pre_submission_checks import router as pre_submission_checks_router -from .project import router as project_router_v2 -from .sharing import router as sharing_router_v2 -from .status_legacy import router as status_legacy_router_v2 -from .submit import router as submit_job_router_v2 -from .task import router as task_router_v2 -from .task_collection import router as task_collection_router_v2 -from .task_collection_custom import router as task_collection_router_v2_custom -from .task_collection_pixi import router as task_collection_pixi_router_v2 -from .task_group import router as task_group_router_v2 -from .task_group_lifecycle import router as task_group_lifecycle_router_v2 -from .task_version_update import router as task_version_update_router_v2 -from .workflow import router as workflow_router_v2 -from .workflow_import import router as workflow_import_router_v2 -from .workflowtask import router as workflowtask_router_v2 +from .project import router as project_router +from .sharing import router as sharing_router +from .status_legacy import router as status_legacy_router +from .submit import router as submit_job_router +from .task import router as task_router +from .task_collection import router as task_collection_router +from .task_collection_custom import router as task_collection_router_custom +from .task_collection_pixi import router as task_collection_pixi_router +from .task_group import router as task_group_router +from .task_group_lifecycle import router as task_group_lifecycle_router +from .task_version_update import router as task_version_update_router +from .workflow import router as workflow_router +from .workflow_import import router as workflow_import_router +from .workflowtask import router as workflowtask_router -router_api_v2 = APIRouter() +router_api = APIRouter() -router_api_v2.include_router(dataset_router_v2, tags=["V2 Dataset"]) -router_api_v2.include_router(pre_submission_checks_router, tags=["V2 Job"]) -router_api_v2.include_router(job_router_v2, tags=["V2 Job"]) -router_api_v2.include_router(images_routes_v2, tags=["V2 Images"]) -router_api_v2.include_router(sharing_router_v2, tags=["Project Sharing"]) -router_api_v2.include_router(project_router_v2, tags=["V2 Project"]) -router_api_v2.include_router(submit_job_router_v2, tags=["V2 Job"]) -router_api_v2.include_router(history_router_v2, tags=["V2 History"]) -router_api_v2.include_router(status_legacy_router_v2, tags=["V2 Status Legacy"]) +router_api.include_router(dataset_router, tags=["Dataset"]) +router_api.include_router(pre_submission_checks_router, tags=["Job"]) +router_api.include_router(job_router, tags=["Job"]) +router_api.include_router(images_routes, tags=["Images"]) +router_api.include_router(sharing_router, tags=["Project Sharing"]) +router_api.include_router(project_router, tags=["Project"]) +router_api.include_router(submit_job_router, tags=["Job"]) +router_api.include_router(history_router, tags=["History"]) +router_api.include_router(status_legacy_router, tags=["Status Legacy"]) settings = Inject(get_settings) -router_api_v2.include_router( - task_collection_router_v2, 
+router_api.include_router( + task_collection_router, prefix="/task", - tags=["V2 Task Lifecycle"], + tags=["Task Lifecycle"], ) -router_api_v2.include_router( - task_collection_router_v2_custom, +router_api.include_router( + task_collection_router_custom, prefix="/task", - tags=["V2 Task Lifecycle"], + tags=["Task Lifecycle"], ) -router_api_v2.include_router( - task_collection_pixi_router_v2, +router_api.include_router( + task_collection_pixi_router, prefix="/task", - tags=["V2 Task Lifecycle"], + tags=["Task Lifecycle"], ) -router_api_v2.include_router( - task_group_lifecycle_router_v2, +router_api.include_router( + task_group_lifecycle_router, prefix="/task-group", - tags=["V2 Task Lifecycle"], + tags=["Task Lifecycle"], ) -router_api_v2.include_router(task_router_v2, prefix="/task", tags=["V2 Task"]) -router_api_v2.include_router(task_version_update_router_v2, tags=["V2 Task"]) -router_api_v2.include_router( - task_group_router_v2, prefix="/task-group", tags=["V2 TaskGroup"] +router_api.include_router(task_router, prefix="/task", tags=["Task"]) +router_api.include_router(task_version_update_router, tags=["Task"]) +router_api.include_router( + task_group_router, prefix="/task-group", tags=["TaskGroup"] ) -router_api_v2.include_router(workflow_router_v2, tags=["V2 Workflow"]) -router_api_v2.include_router( - workflow_import_router_v2, tags=["V2 Workflow Import"] -) -router_api_v2.include_router(workflowtask_router_v2, tags=["V2 WorkflowTask"]) +router_api.include_router(workflow_router, tags=["Workflow"]) +router_api.include_router(workflow_import_router, tags=["Workflow Import"]) +router_api.include_router(workflowtask_router, tags=["WorkflowTask"]) diff --git a/fractal_server/app/routes/api/v2/submit.py b/fractal_server/app/routes/api/v2/submit.py index 04d351d471..d806d8dafd 100644 --- a/fractal_server/app/routes/api/v2/submit.py +++ b/fractal_server/app/routes/api/v2/submit.py @@ -63,13 +63,13 @@ async def apply_workflow( ) -> JobRead | None: # Remove non-submitted V2 jobs from the app state when the list grows # beyond a threshold - # NOTE: this may lead to a race condition on `app.state.jobsV2` if two - # requests take place at the same time and `clean_app_job_list_v2` is + # NOTE: this may lead to a race condition on `app.state.jobs` if two + # requests take place at the same time and `clean_app_job_list` is # somewhat slow. settings = Inject(get_settings) - if len(request.app.state.jobsV2) > settings.FRACTAL_API_MAX_JOB_LIST_LENGTH: - new_jobs_list = await clean_app_job_list(db, request.app.state.jobsV2) - request.app.state.jobsV2 = new_jobs_list + if len(request.app.state.jobs) > settings.FRACTAL_API_MAX_JOB_LIST_LENGTH: + new_jobs_list = await clean_app_job_list(db, request.app.state.jobs) + request.app.state.jobs = new_jobs_list output = await _get_dataset_check_access( project_id=project_id, @@ -266,11 +266,11 @@ async def apply_workflow( resource=resource, profile=profile, ) - request.app.state.jobsV2.append(job.id) + request.app.state.jobs.append(job.id) logger.info( f"Current worker's pid is {os.getpid()}.
" f"Current status of worker job's list " - f"{request.app.state.jobsV2}" + f"{request.app.state.jobs}" ) await db.close() return job diff --git a/fractal_server/app/shutdown.py b/fractal_server/app/shutdown.py index 9caf488772..adb3de9b64 100644 --- a/fractal_server/app/shutdown.py +++ b/fractal_server/app/shutdown.py @@ -11,18 +11,18 @@ from fractal_server.syringe import Inject -async def cleanup_after_shutdown(*, jobsV2: list[int], logger_name: str): +async def cleanup_after_shutdown(*, jobs: list[int], logger_name: str): settings = Inject(get_settings) logger = get_logger(logger_name) logger.info("Cleanup function after shutdown") stm_objects = ( select(JobV2) - .where(JobV2.id.in_(jobsV2)) + .where(JobV2.id.in_(jobs)) .where(JobV2.status == JobStatusType.SUBMITTED) ) stm_ids = ( select(JobV2.id) - .where(JobV2.id.in_(jobsV2)) + .where(JobV2.id.in_(jobs)) .where(JobV2.status == JobStatusType.SUBMITTED) ) diff --git a/fractal_server/config/_main.py b/fractal_server/config/_main.py index b93327895e..e93124d7da 100644 --- a/fractal_server/config/_main.py +++ b/fractal_server/config/_main.py @@ -28,7 +28,7 @@ class Settings(BaseSettings): - Only logs of with this level (or higher) will appear in the console logs. + Only logs with this level (or higher) will appear in the console logs. FRACTAL_API_MAX_JOB_LIST_LENGTH: - Number of ids that can be stored in the `jobsV2` attribute of + Number of ids that can be stored in the `jobs` attribute of `app.state`. FRACTAL_GRACEFUL_SHUTDOWN_TIME: Waiting time for the shutdown phase of executors, in seconds. diff --git a/fractal_server/main.py b/fractal_server/main.py index a16a056727..5fa3bf9f7b 100644 --- a/fractal_server/main.py +++ b/fractal_server/main.py @@ -33,16 +33,14 @@ def collect_routers(app: FastAPI) -> None: app: The application to register the routers to. """ - from .app.routes.admin.v2 import router_admin_v2 + from .app.routes.admin.v2 import router_admin from .app.routes.api import router_api - from .app.routes.api.v2 import router_api_v2 + from .app.routes.api.v2 import router_api as router_api_v2 from .app.routes.auth.router import router_auth app.include_router(router_api, prefix="/api") app.include_router(router_api_v2, prefix="/api/v2") - app.include_router( - router_admin_v2, prefix="/admin/v2", tags=["V2 Admin area"] - ) + app.include_router(router_admin, prefix="/admin/v2", tags=["Admin area"]) app.include_router(router_auth, prefix="/auth", tags=["Authentication"]) @@ -74,7 +72,7 @@ def check_settings() -> None: @asynccontextmanager async def lifespan(app: FastAPI): - app.state.jobsV2 = [] + app.state.jobs = [] logger = set_logger("fractal_server.lifespan") logger.info(f"[startup] START (fractal-server {__VERSION__})") check_settings() @@ -111,12 +109,12 @@ async def lifespan(app: FastAPI): logger.info( f"[teardown] Current worker with pid {os.getpid()} is shutting down.
" - f"Current jobs: {app.state.jobsV2=}" + f"Current jobs: {app.state.jobs=}" ) if _backend_supports_shutdown(settings.FRACTAL_RUNNER_BACKEND): try: await cleanup_after_shutdown( - jobsV2=app.state.jobsV2, + jobs=app.state.jobs, logger_name="fractal_server.lifespan", ) except Exception as e: diff --git a/scripts/client/client.py b/scripts/client/client.py index a864e0d998..51f7fef58c 100644 --- a/scripts/client/client.py +++ b/scripts/client/client.py @@ -33,8 +33,7 @@ wsgi_app = ASGIMiddleware(app) -wsgi_app.app.state.jobsV1 = [] -wsgi_app.app.state.jobsV2 = [] +wsgi_app.app.state.jobs = [] wsgi_app.app.state.fractal_ssh_list = None diff --git a/tests/conftest.py b/tests/conftest.py index 1d865053e0..25a3c821a1 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -43,8 +43,8 @@ def current_py_version() -> str: from .fixtures_server import * # noqa F403 -from .fixtures_server_v2 import * # noqa F403 -from .fixtures_tasks_v2 import * # noqa F403 +from .fixtures_factories import * # noqa F403 +from .fixtures_tasks import * # noqa F403 from .fixtures_docker import * # noqa F403 from .fixtures_slurm import * # noqa F403 from .fixtures_pixi import * # noqa F403 diff --git a/tests/fixtures_server_v2.py b/tests/fixtures_factories.py similarity index 100% rename from tests/fixtures_server_v2.py rename to tests/fixtures_factories.py diff --git a/tests/fixtures_server.py b/tests/fixtures_server.py index 36d9276f3f..73d75ed048 100644 --- a/tests/fixtures_server.py +++ b/tests/fixtures_server.py @@ -176,7 +176,7 @@ async def db_sync(db_create_tables): @pytest.fixture def app() -> Generator[FastAPI, Any]: app = FastAPI() - app.state.jobsV2 = [] + app.state.jobs = [] app.state.fractal_ssh_list = None yield app diff --git a/tests/fixtures_tasks_v2.py b/tests/fixtures_tasks.py similarity index 100% rename from tests/fixtures_tasks_v2.py rename to tests/fixtures_tasks.py diff --git a/tests/no_version/test_unit_lifespan.py b/tests/no_version/test_unit_lifespan.py index 552317c436..2ec5f003f3 100644 --- a/tests/no_version/test_unit_lifespan.py +++ b/tests/no_version/test_unit_lifespan.py @@ -8,7 +8,7 @@ from fractal_server.app.models.security import UserOAuth from fractal_server.app.models.v2.job import JobV2 from fractal_server.app.routes.api.v2._aux_functions import ( - _workflow_insert_task as _workflow_insert_task_v2, + _workflow_insert_task, ) from fractal_server.app.schemas.v2 import ResourceType from fractal_server.app.security import _create_first_group @@ -55,13 +55,13 @@ async def test_app_with_lifespan( async with lifespan(app): # verify shutdown - assert len(app.state.jobsV2) == 0 + assert len(app.state.jobs) == 0 task = await task_factory(user_id=user.id, name="task", command="echo") project = await project_factory(user) workflow = await workflow_factory(project_id=project.id) dataset1 = await dataset_factory(project_id=project.id, name="ds-1") - await _workflow_insert_task_v2( + await _workflow_insert_task( workflow_id=workflow.id, task_id=task.id, db=db ) # Create jobv2 with submitted status @@ -74,8 +74,8 @@ async def test_app_with_lifespan( last_task_index=0, ) - # append submitted job to jobsV2 status - app.state.jobsV2.append(jobv2.id) + # append submitted job to jobs status + app.state.jobs.append(jobv2.id) # we need to close the db session to get # updated data from db @@ -115,7 +115,7 @@ async def test_lifespan_shutdown_raise_error( ): # mock function to trigger except - async def raise_error(*, jobsV2: list[int], logger_name: str): + async def raise_error(*, jobs: list[int], 
logger_name: str): raise ValueError("ERROR") monkeypatch.setattr( @@ -141,6 +141,6 @@ async def test_lifespan_slurm_ssh(override_settings_factory, db): override_settings_factory(FRACTAL_RUNNER_BACKEND=ResourceType.SLURM_SSH) app = FastAPI() async with lifespan(app): - assert len(app.state.jobsV2) == 0 + assert len(app.state.jobs) == 0 assert isinstance(app.state.fractal_ssh_list, FractalSSHList) assert app.state.fractal_ssh_list.size == 0 diff --git a/tests/v2/test_03_api/test_api_project.py b/tests/v2/test_03_api/test_api_project.py index 4549168714..d3dac842da 100644 --- a/tests/v2/test_03_api/test_api_project.py +++ b/tests/v2/test_03_api/test_api_project.py @@ -31,7 +31,7 @@ async def test_post_and_get_project( local_resource_profile_db, ): resource, profile = local_resource_profile_db - PAYLOAD = dict(name="project_v2") + PAYLOAD = dict(name="project") # unauthenticated res = await client.post(f"{PREFIX}/project/", json=PAYLOAD) diff --git a/tests/v2/test_03_api/test_submission_job_list_v2.py b/tests/v2/test_03_api/test_submission_job_list_v2.py index 43b12cf7f6..022c35d977 100644 --- a/tests/v2/test_03_api/test_submission_job_list_v2.py +++ b/tests/v2/test_03_api/test_submission_job_list_v2.py @@ -18,7 +18,7 @@ async def test_clean_app_job_list( local_resource_profile_db, ): # Check that app fixture starts in a clean state - assert app.state.jobsV2 == [] + assert app.state.jobs == [] # Set this to 0 so that the endpoint also calls the clean-up function override_settings_factory(FRACTAL_API_MAX_JOB_LIST_LENGTH=0) @@ -52,7 +52,7 @@ async def test_clean_app_job_list( working_dir="/somewhere", ) job1_id = job1.id - app.state.jobsV2.append(job1_id) + app.state.jobs.append(job1_id) # Submit a second job via API res = await client.post( @@ -64,8 +64,8 @@ async def test_clean_app_job_list( job2_id = res.json()["id"] # Before clean-up, both jobs are listed - assert app.state.jobsV2 == [job1_id, job2_id] + assert app.state.jobs == [job1_id, job2_id] # After clean-up, only the submitted job is left - jobs_list = await clean_app_job_list(db, app.state.jobsV2) + jobs_list = await clean_app_job_list(db, app.state.jobs) assert jobs_list == [job1_id] diff --git a/tests/v2/test_04_runner/execute_tasks_v2.py b/tests/v2/test_04_runner/execute_tasks.py similarity index 91% rename from tests/v2/test_04_runner/execute_tasks_v2.py rename to tests/v2/test_04_runner/execute_tasks.py index 73705d286c..d9b746db2b 100644 --- a/tests/v2/test_04_runner/execute_tasks_v2.py +++ b/tests/v2/test_04_runner/execute_tasks.py @@ -17,7 +17,7 @@ def execute_tasks_mod( **kwargs, ) -> None: """ - This is a version of `execute_tasks_v2` with some defaults pre-filled. + This is a version of `execute_tasks` with some defaults pre-filled. 
""" execute_tasks( wf_task_list=wf_task_list, diff --git a/tests/v2/test_04_runner/test_dummy_examples.py b/tests/v2/test_04_runner/test_dummy_examples.py index 5801920b7f..541dc0aefb 100644 --- a/tests/v2/test_04_runner/test_dummy_examples.py +++ b/tests/v2/test_04_runner/test_dummy_examples.py @@ -18,7 +18,7 @@ from fractal_server.urls import normalize_url from .aux_get_dataset_attrs import _get_dataset_attrs -from .execute_tasks_v2 import execute_tasks_mod +from .execute_tasks import execute_tasks_mod async def _find_last_history_unit(db: AsyncSession) -> HistoryUnit: @@ -115,7 +115,7 @@ async def test_dummy_insert_single_image( resource, profile = local_resource_profile_db async with MockCurrentUser(user_kwargs={"profile_id": profile.id}) as user: - execute_tasks_v2_args = dict( + execute_tasks_args = dict( runner=local_runner, user_id=user.id, ) @@ -142,7 +142,7 @@ async def test_dummy_insert_single_image( dataset=dataset, workflow_dir_local=tmp_path / "job0", job_id=job.id, - **execute_tasks_v2_args, + **execute_tasks_args, ) # Case 1: Run successfully even if the image already exists @@ -153,7 +153,7 @@ async def test_dummy_insert_single_image( dataset=dataset, workflow_dir_local=tmp_path / "job1", job_id=job.id, - **execute_tasks_v2_args, + **execute_tasks_args, ) # Case 2: Run successfully even if the image already exists but the new @@ -203,7 +203,7 @@ async def test_dummy_insert_single_image( dataset=dataset_case_2, workflow_dir_local=tmp_path / "job2", job_id=job.id, - **execute_tasks_v2_args, + **execute_tasks_args, ) db.expunge_all() dataset_case_2 = await db.get(DatasetV2, dataset_case_2.id) @@ -233,7 +233,7 @@ async def test_dummy_insert_single_image( EXPECTED_NON_PARENT_MSG = ( "Cannot create image if zarr_url is not a subfolder of zarr_dir" ) - execute_tasks_v2_args = dict( + execute_tasks_args = dict( runner=local_runner, user_id=user.id, ) @@ -253,7 +253,7 @@ async def test_dummy_insert_single_image( dataset=dataset, workflow_dir_local=tmp_path / "job3", job_id=job.id, - **execute_tasks_v2_args, + **execute_tasks_args, ) error_msg = str(e.value) debug(error_msg) @@ -709,7 +709,7 @@ async def test_dummy_invalid_output_non_parallel( # case non-parallel task_id = fractal_tasks_mock_db["dummy_insert_single_image"].id async with MockCurrentUser() as user: - execute_tasks_v2_args = dict( + execute_tasks_args = dict( runner=local_runner, user_id=user.id, ) @@ -756,7 +756,7 @@ def patched_cast(*args, **kwargs): dataset=dataset, workflow_dir_local=tmp_path / "job0", job_id=job.id, - **execute_tasks_v2_args, + **execute_tasks_args, ) res = await db.execute( select(HistoryRun).where(HistoryRun.dataset_id == dataset.id) @@ -785,7 +785,7 @@ async def test_dummy_invalid_output_parallel( zarr_dir = (tmp_path / "zarr_dir").as_posix().rstrip("/") task_id = fractal_tasks_mock_db["generic_task_parallel"].id async with MockCurrentUser() as user: - execute_tasks_v2_args = dict( + execute_tasks_args = dict( runner=local_runner, user_id=user.id, ) @@ -836,7 +836,7 @@ def patched_task_output(*args, **kwargs): dataset=dataset, workflow_dir_local=tmp_path / "job0", job_id=job.id, - **execute_tasks_v2_args, + **execute_tasks_args, ) res = await db.execute( select(HistoryRun).where(HistoryRun.dataset_id == dataset.id) diff --git a/tests/v2/test_04_runner/test_fractal_examples.py b/tests/v2/test_04_runner/test_fractal_examples.py index 8b6e032bdb..154ececf7a 100644 --- a/tests/v2/test_04_runner/test_fractal_examples.py +++ b/tests/v2/test_04_runner/test_fractal_examples.py @@ -10,7 +10,7 @@ from 
fractal_server.runner.executors.local.runner import LocalRunner from .aux_get_dataset_attrs import _get_dataset_attrs -from .execute_tasks_v2 import execute_tasks_mod +from .execute_tasks import execute_tasks_mod @pytest.fixture() diff --git a/tests/v2/test_04_runner/test_no_images_parallelization.py b/tests/v2/test_04_runner/test_no_images_parallelization.py index 6bebe65921..9f95405d04 100644 --- a/tests/v2/test_04_runner/test_no_images_parallelization.py +++ b/tests/v2/test_04_runner/test_no_images_parallelization.py @@ -6,7 +6,7 @@ from fractal_server.runner.exceptions import JobExecutionError from fractal_server.runner.executors.local.runner import LocalRunner -from .execute_tasks_v2 import execute_tasks_mod +from .execute_tasks import execute_tasks_mod @pytest.fixture() diff --git a/tests/v2/test_06_tasks_lifecycle/test_collect_local.py b/tests/v2/test_06_tasks_lifecycle/test_collect_local.py index 344a0284f1..b5130d5348 100644 --- a/tests/v2/test_06_tasks_lifecycle/test_collect_local.py +++ b/tests/v2/test_06_tasks_lifecycle/test_collect_local.py @@ -55,12 +55,12 @@ async def test_collect_pip_existing_folder( profile=profile, ) # Verify that collection failed - task_group_activity_v2 = await db.get( + task_group_activity = await db.get( TaskGroupActivityV2, task_group_activity.id ) - debug(task_group_activity_v2) - assert task_group_activity_v2.status == "failed" - assert task_group_activity_v2.taskgroupv2_id is None + debug(task_group_activity) + assert task_group_activity.status == "failed" + assert task_group_activity.taskgroupv2_id is None async def test_collect_pip_local_fail_rmtree( @@ -133,12 +133,12 @@ def patched_function(*args, **kwargs): "the `rmtree` call that cleans up `tmpdir`. Safe to ignore." ) # Verify that collection failed - task_group_activity_v2 = await db.get( + task_group_activity = await db.get( TaskGroupActivityV2, task_group_activity.id ) - debug(task_group_activity_v2) - assert task_group_activity_v2.status == "failed" - assert "Broken rm" in task_group_activity_v2.log + debug(task_group_activity) + assert task_group_activity.status == "failed" + assert "Broken rm" in task_group_activity.log assert path.exists() diff --git a/tests/v2/test_06_tasks_lifecycle/test_collect_pixi_local.py b/tests/v2/test_06_tasks_lifecycle/test_collect_pixi_local.py index 31504d2541..14881c17d5 100644 --- a/tests/v2/test_06_tasks_lifecycle/test_collect_pixi_local.py +++ b/tests/v2/test_06_tasks_lifecycle/test_collect_pixi_local.py @@ -66,9 +66,9 @@ async def test_collect_local_pixi_path_exists( profile=profile, ) # Verify that collection failed - task_group_activity_v2 = await db.get( + task_group_activity = await db.get( TaskGroupActivityV2, task_group_activity.id ) - debug(task_group_activity_v2) - assert task_group_activity_v2.status == "failed" - assert task_group_activity_v2.taskgroupv2_id is None + debug(task_group_activity) + assert task_group_activity.status == "failed" + assert task_group_activity.taskgroupv2_id is None diff --git a/tests/v2/test_06_tasks_lifecycle/test_deactivate_local.py b/tests/v2/test_06_tasks_lifecycle/test_deactivate_local.py index fe8216fbb9..1b522af90f 100644 --- a/tests/v2/test_06_tasks_lifecycle/test_deactivate_local.py +++ b/tests/v2/test_06_tasks_lifecycle/test_deactivate_local.py @@ -52,12 +52,12 @@ async def test_deactivate_fail_no_venv_path( ) # Verify that deactivate failed - task_group_activity_v2 = await db.get( + task_group_activity = await db.get( TaskGroupActivityV2, task_group_activity.id ) - debug(task_group_activity_v2) - assert 
task_group_activity_v2.status == "failed" - assert "does not exist" in task_group_activity_v2.log + debug(task_group_activity) + assert task_group_activity.status == "failed" + assert "does not exist" in task_group_activity.log async def test_deactivate_local_fail( @@ -166,13 +166,13 @@ async def test_deactivate_wheel_no_archive_path( profile=profile, ) # Verify that deactivate failed - task_group_activity_v2 = await db.get( + task_group_activity = await db.get( TaskGroupActivityV2, task_group_activity.id ) - debug(task_group_activity_v2) - assert task_group_activity_v2.status == "failed" - assert "does not exist" in task_group_activity_v2.log - assert "Invalid wheel path" in task_group_activity_v2.log + debug(task_group_activity) + assert task_group_activity.status == "failed" + assert "does not exist" in task_group_activity.log + assert "Invalid wheel path" in task_group_activity.log async def test_deactivate_wheel_package_created_before_2_9_0( diff --git a/tests/v2/test_06_tasks_lifecycle/test_deactivate_ssh.py b/tests/v2/test_06_tasks_lifecycle/test_deactivate_ssh.py index d4c399fafe..03f5bc7925 100644 --- a/tests/v2/test_06_tasks_lifecycle/test_deactivate_ssh.py +++ b/tests/v2/test_06_tasks_lifecycle/test_deactivate_ssh.py @@ -71,12 +71,12 @@ async def test_deactivate_fail_no_venv_path( ) # Verify that deactivate failed - task_group_activity_v2 = await db.get( + task_group_activity = await db.get( TaskGroupActivityV2, task_group_activity.id ) - debug(task_group_activity_v2) - assert task_group_activity_v2.status == "failed" - assert "does not exist" in task_group_activity_v2.log + debug(task_group_activity) + assert task_group_activity.status == "failed" + assert "does not exist" in task_group_activity.log _reset_permissions( fractal_ssh=fractal_ssh, @@ -206,13 +206,13 @@ async def test_deactivate_wheel_no_archive_path( profile=profile, ) # Verify that deactivate failed - task_group_activity_v2 = await db.get( + task_group_activity = await db.get( TaskGroupActivityV2, task_group_activity.id ) - debug(task_group_activity_v2) - assert task_group_activity_v2.status == "failed" - assert "does not exist" in task_group_activity_v2.log - assert "Invalid wheel path" in task_group_activity_v2.log + debug(task_group_activity) + assert task_group_activity.status == "failed" + assert "does not exist" in task_group_activity.log + assert "Invalid wheel path" in task_group_activity.log _reset_permissions( fractal_ssh=fractal_ssh, diff --git a/tests/v2/test_06_tasks_lifecycle/test_reactivate_local.py b/tests/v2/test_06_tasks_lifecycle/test_reactivate_local.py index 0da786ebf1..d0936b3e03 100644 --- a/tests/v2/test_06_tasks_lifecycle/test_reactivate_local.py +++ b/tests/v2/test_06_tasks_lifecycle/test_reactivate_local.py @@ -53,12 +53,12 @@ async def test_reactivate_local_venv_exists( ) # Verify that reactivate failed - task_group_activity_v2 = await db.get( + task_group_activity = await db.get( TaskGroupActivityV2, task_group_activity.id ) - debug(task_group_activity_v2) - assert task_group_activity_v2.status == "failed" - assert "already exists" in task_group_activity_v2.log + debug(task_group_activity) + assert task_group_activity.status == "failed" + assert "already exists" in task_group_activity.log @pytest.mark.parametrize("make_rmtree_fail", [False, True]) diff --git a/tests/v2/test_06_tasks_lifecycle/test_reactivate_ssh.py b/tests/v2/test_06_tasks_lifecycle/test_reactivate_ssh.py index 32a485c662..2dcb3a22a1 100644 --- a/tests/v2/test_06_tasks_lifecycle/test_reactivate_ssh.py +++ 
b/tests/v2/test_06_tasks_lifecycle/test_reactivate_ssh.py @@ -71,12 +71,12 @@ async def test_reactivate_ssh_venv_exists( ) # Verify that reactivate failed - task_group_activity_v2 = await db.get( + task_group_activity = await db.get( TaskGroupActivityV2, task_group_activity.id ) - debug(task_group_activity_v2) - assert task_group_activity_v2.status == "failed" - assert "already exists" in task_group_activity_v2.log + debug(task_group_activity) + assert task_group_activity.status == "failed" + assert "already exists" in task_group_activity.log _reset_permissions( fractal_ssh=fractal_ssh, From 5b90d3b836d8b904536c05f7fbe99c8a3d2a01fd Mon Sep 17 00:00:00 2001 From: Yuri Chiucconi Date: Fri, 28 Nov 2025 17:14:14 +0100 Subject: [PATCH 09/13] small fixes --- fractal_server/app/routes/api/v2/submit.py | 2 +- tests/no_version/test_unit_lifespan.py | 16 ++++++++-------- .../test_api_task_collection_ssh.py | 4 ++-- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/fractal_server/app/routes/api/v2/submit.py b/fractal_server/app/routes/api/v2/submit.py index d806d8dafd..f0d6a738e0 100644 --- a/fractal_server/app/routes/api/v2/submit.py +++ b/fractal_server/app/routes/api/v2/submit.py @@ -64,7 +64,7 @@ async def apply_workflow( # Remove non-submitted V2 jobs from the app state when the list grows # beyond a threshold # NOTE: this may lead to a race condition on `app.state.jobs` if two - # requests take place at the same time and `clean_app_job_list_v2` is + # requests take place at the same time and `clean_app_job_list` is # somewhat slow. settings = Inject(get_settings) if len(request.app.state.jobs) > settings.FRACTAL_API_MAX_JOB_LIST_LENGTH: diff --git a/tests/no_version/test_unit_lifespan.py b/tests/no_version/test_unit_lifespan.py index 2ec5f003f3..e65d6fbf78 100644 --- a/tests/no_version/test_unit_lifespan.py +++ b/tests/no_version/test_unit_lifespan.py @@ -64,8 +64,8 @@ async def test_app_with_lifespan( await _workflow_insert_task( workflow_id=workflow.id, task_id=task.id, db=db ) - # Create jobv2 with submitted status - jobv2 = await job_factory( + # Create job with submitted status + job = await job_factory( project_id=project.id, workflow_id=workflow.id, dataset_id=dataset1.id, @@ -75,20 +75,20 @@ async def test_app_with_lifespan( ) # append submitted job to jobs status - app.state.jobs.append(jobv2.id) + app.state.jobs.append(job.id) # we need to close the db session to get # updated data from db await db.close() # verify that the shutdown file was created during the lifespan cleanup - assert os.path.exists(f"{jobv2.working_dir}/{SHUTDOWN_FILENAME}") - jobv2_after = ( - await db.execute(select(JobV2).where(JobV2.id == jobv2.id)) + assert os.path.exists(f"{job.working_dir}/{SHUTDOWN_FILENAME}") + job_after = ( + await db.execute(select(JobV2).where(JobV2.id == job.id)) ).scalar_one_or_none() - assert jobv2_after.status == "failed" - assert jobv2_after.log == "\nJob stopped due to app shutdown\n" + assert job_after.status == "failed" + assert job_after.log == "\nJob stopped due to app shutdown\n" async def test_lifespan_shutdown_empty_jobs_list( diff --git a/tests/v2/test_06_tasks_lifecycle/test_api_task_collection_ssh.py b/tests/v2/test_06_tasks_lifecycle/test_api_task_collection_ssh.py index 285ca90c6a..0b84e292cc 100644 --- a/tests/v2/test_06_tasks_lifecycle/test_api_task_collection_ssh.py +++ b/tests/v2/test_06_tasks_lifecycle/test_api_task_collection_ssh.py @@ -75,10 +75,10 @@ async def test_task_collection_ssh_from_pypi( assert res.status_code == 200 task_group_activity = 
res.json()
     assert task_group_activity["status"] == "OK"
-    task_groupv2_id = task_group_activity["taskgroupv2_id"]
+    task_group_id = task_group_activity["taskgroupv2_id"]
     # Check env_info attribute in TaskGroupV2
     db.expunge_all()
-    task_group = await db.get(TaskGroupV2, task_groupv2_id)
+    task_group = await db.get(TaskGroupV2, task_group_id)
     assert f"testing-tasks-mock=={package_version}" in task_group.env_info
     # Check venv_size and venv_file_number in TaskGroupV2
     assert task_group.venv_size_in_kB is not None

From 6a6137096377238be785790ac38fffe08b622e4c Mon Sep 17 00:00:00 2001
From: Yuri Chiucconi
Date: Fri, 28 Nov 2025 17:24:35 +0100
Subject: [PATCH 10/13] comments and logs

---
 .github/workflows/ci.yml                      |  8 ++---
 fractal_server/app/routes/api/v2/submit.py    |  2 +-
 fractal_server/runner/v2/runner.py            |  2 +-
 fractal_server/tasks/v2/utils_database.py     |  4 +--
 scripts/validate_db_data_with_read_schemas.py | 30 +++++++++----------
 tests/v2/test_03_api/test_api_dataset.py      |  6 ++--
 tests/v2/test_03_api/test_api_project.py      |  2 +-
 7 files changed, 26 insertions(+), 28 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index eb3579e0f9..095e9f0410 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -12,7 +12,7 @@ jobs:
 
   tests_api:
 
-    name: "V2 - API - Python ${{ matrix.python-version }}"
+    name: "API - Python ${{ matrix.python-version }}"
 
     runs-on: ubuntu-24.04
     timeout-minutes: 30
@@ -61,7 +61,7 @@ jobs:
 
   tests_not_api:
 
-    name: "V2 - not API - Python ${{ matrix.python-version }}"
+    name: "Not API - Python ${{ matrix.python-version }}"
 
     runs-on: ubuntu-24.04
     timeout-minutes: 30
@@ -110,7 +110,7 @@ jobs:
 
   tests_containers:
 
-    name: "V2 - Containers - Python ${{ matrix.python-version }}"
+    name: "Containers - Python ${{ matrix.python-version }}"
 
     runs-on: ubuntu-24.04
     timeout-minutes: 30
@@ -171,7 +171,7 @@ jobs:
 
   tests_oauth:
 
-    name: "V2 - OAuth - Python ${{ matrix.python-version }}"
+    name: "OAuth - Python ${{ matrix.python-version }}"
 
     runs-on: ubuntu-24.04
    timeout-minutes: 30
diff --git a/fractal_server/app/routes/api/v2/submit.py b/fractal_server/app/routes/api/v2/submit.py
index f0d6a738e0..cecdb85d30 100644
--- a/fractal_server/app/routes/api/v2/submit.py
+++ b/fractal_server/app/routes/api/v2/submit.py
@@ -61,7 +61,7 @@ async def apply_workflow(
     user: UserOAuth = Depends(current_user_act_ver_prof),
     db: AsyncSession = Depends(get_async_db),
 ) -> JobRead | None:
-    # Remove non-submitted V2 jobs from the app state when the list grows
+    # Remove non-submitted jobs from the app state when the list grows
     # beyond a threshold
     # NOTE: this may lead to a race condition on `app.state.jobs` if two
     # requests take place at the same time and `clean_app_job_list` is
diff --git a/fractal_server/runner/v2/runner.py b/fractal_server/runner/v2/runner.py
index 09eaabd84d..65216d4f91 100644
--- a/fractal_server/runner/v2/runner.py
+++ b/fractal_server/runner/v2/runner.py
@@ -218,7 +218,7 @@ def execute_tasks(
             )
             raise JobExecutionError(error_msg)
 
-        # TASK EXECUTION (V2)
+        # TASK EXECUTION
         try:
             if task.type in [
                 TaskType.NON_PARALLEL,
diff --git a/fractal_server/tasks/v2/utils_database.py b/fractal_server/tasks/v2/utils_database.py
index 1a2e7da640..9ac226ccd5 100644
--- a/fractal_server/tasks/v2/utils_database.py
+++ b/fractal_server/tasks/v2/utils_database.py
@@ -17,7 +17,7 @@ def create_db_tasks_and_update_task_group_sync(
 
     Args:
         task_group_id: ID of an existing `TaskGroupV2` object.
-        task_list: List of `TaskCreateV2` objects to be inserted into the db.
+ task_list: List of `TaskCreate` objects to be inserted into the db. db: Synchronous database session Returns: @@ -44,7 +44,7 @@ async def create_db_tasks_and_update_task_group_async( Args: task_group_id: ID of an existing `TaskGroupV2` object. - task_list: List of `TaskCreateV2` objects to be inserted into the db. + task_list: List of `TaskCreate` objects to be inserted into the db. db: Synchronous database session Returns: diff --git a/scripts/validate_db_data_with_read_schemas.py b/scripts/validate_db_data_with_read_schemas.py index d1bd4e47ee..722dc4f699 100644 --- a/scripts/validate_db_data_with_read_schemas.py +++ b/scripts/validate_db_data_with_read_schemas.py @@ -67,23 +67,21 @@ f"{user_ids_not_in_default_group}" ) - # V2 - - # PROJECTS V2 + # PROJECTS stm = select(ProjectV2) projects = db.execute(stm).scalars().all() for project in sorted(projects, key=lambda x: x.id): ProjectRead(**project.model_dump()) - print(f"V2 - Project {project.id} validated") + print(f"Project {project.id} validated") - # TASKS V2 + # TASKS stm = select(TaskV2) tasks = db.execute(stm).scalars().all() for task in sorted(tasks, key=lambda x: x.id): TaskRead(**task.model_dump()) - print(f"V2 - Task {task.id} validated") + print(f"Task {task.id} validated") - # TASK GROUPS V2 + # TASK GROUPS stm = select(TaskGroupV2) task_groups = db.execute(stm).scalars().all() for task_group in sorted(task_groups, key=lambda x: x.id): @@ -91,16 +89,16 @@ for task in task_group.task_list: task_list.append(TaskRead(**task.model_dump())) TaskGroupRead(**task_group.model_dump(), task_list=task_list) - print(f"V2 - TaskGroup {task_group.id} validated") + print(f"TaskGroup {task_group.id} validated") - # TASK GROUP V2 ACTIVITIES + # TASK GROUP ACTIVITIES stm = select(TaskGroupActivityV2) task_group_activities = db.execute(stm).scalars().all() for activity in sorted(task_group_activities, key=lambda x: x.id): TaskGroupActivityRead(**activity.model_dump()) - print(f"V2 - TaskGroupActivity {activity.id} validated") + print(f"TaskGroupActivity {activity.id} validated") - # WORKFLOWS V2 + # WORKFLOWS stm = select(WorkflowV2) workflows = db.execute(stm).scalars().all() for workflow in sorted(workflows, key=lambda x: x.id): @@ -119,9 +117,9 @@ project=ProjectRead(**workflow.project.model_dump()), task_list=task_list, ) - print(f"V2 - Workflow {workflow.id} validated") + print(f"Workflow {workflow.id} validated") - # DATASETS V2 + # DATASETS stm = select(DatasetV2) datasets = db.execute(stm).scalars().all() for dataset in sorted(datasets, key=lambda x: x.id): @@ -129,11 +127,11 @@ **dataset.model_dump(), project=ProjectRead(**dataset.project.model_dump()), ) - print(f"V2 - Dataset {dataset.id} validated") + print(f"Dataset {dataset.id} validated") - # JOBS V2 + # JOBS stm = select(JobV2) jobs = db.execute(stm).scalars().all() for job in sorted(jobs, key=lambda x: x.id): JobRead(**job.model_dump()) - print(f"V2 - Job {job.id} validated") + print(f"Job {job.id} validated") diff --git a/tests/v2/test_03_api/test_api_dataset.py b/tests/v2/test_03_api/test_api_dataset.py index 864065dd66..acfdcf0607 100644 --- a/tests/v2/test_03_api/test_api_dataset.py +++ b/tests/v2/test_03_api/test_api_dataset.py @@ -40,11 +40,11 @@ async def test_new_dataset( ): resource, profile = local_resource_profile_db async with MockCurrentUser(user_kwargs=dict(profile_id=profile.id)) as user: - res = await client.post("api/v2/project/", json=dict(name="projectV2")) + res = await client.post("api/v2/project/", json=dict(name="project")) debug(res.json()) assert 
res.status_code == 201
-        projectV2 = res.json()
-        p2_id = projectV2["id"]
+        project = res.json()
+        p2_id = project["id"]
 
         # POST
 
diff --git a/tests/v2/test_03_api/test_api_project.py b/tests/v2/test_03_api/test_api_project.py
index d3dac842da..012b0614ac 100644
--- a/tests/v2/test_03_api/test_api_project.py
+++ b/tests/v2/test_03_api/test_api_project.py
@@ -57,7 +57,7 @@ async def test_post_and_get_project(
     assert res.status_code == 201
     assert len(await _project_list(userB, db)) == 1
 
-    # a user can't create two projectsV2 with the same name
+    # a user can't create two projects with the same name
     res = await client.post(f"{PREFIX}/project/", json=dict(name="project"))
     assert res.status_code == 422
     assert len(await _project_list(userB, db)) == 1

From 63e48f862e324caa34af0bc5f62f367865677c9f Mon Sep 17 00:00:00 2001
From: Tommaso Comparin <3862206+tcompa@users.noreply.github.com>
Date: Mon, 1 Dec 2025 10:33:16 +0100
Subject: [PATCH 11/13] revert some changes

---
 fractal_server/app/schemas/v2/manifest.py             | 4 ++--
 tests/v2/test_01_schemas/test_unit_json_schemas_v2.py | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/fractal_server/app/schemas/v2/manifest.py b/fractal_server/app/schemas/v2/manifest.py
index d079693f77..73653c764b 100644
--- a/fractal_server/app/schemas/v2/manifest.py
+++ b/fractal_server/app/schemas/v2/manifest.py
@@ -145,14 +145,14 @@ def _check_args_schemas_are_present(self):
             if task.executable_parallel is not None:
                 if task.args_schema_parallel is None:
                     raise ValueError(
-                        f"ManifestV2 has {has_args_schemas=}, but "
+                        f"Manifest has {has_args_schemas=}, but "
                         f"task '{task.name}' has "
                         f"{task.args_schema_parallel=}."
                     )
             if task.executable_non_parallel is not None:
                 if task.args_schema_non_parallel is None:
                     raise ValueError(
-                        f"ManifestV2 has {has_args_schemas=}, but "
+                        f"Manifest has {has_args_schemas=}, but "
                         f"task '{task.name}' has "
                         f"{task.args_schema_non_parallel=}."
                     )
diff --git a/tests/v2/test_01_schemas/test_unit_json_schemas_v2.py b/tests/v2/test_01_schemas/test_unit_json_schemas_v2.py
index 41799e1a97..1de4b814e3 100644
--- a/tests/v2/test_01_schemas/test_unit_json_schemas_v2.py
+++ b/tests/v2/test_01_schemas/test_unit_json_schemas_v2.py
@@ -5,7 +5,7 @@
 from fractal_server.app.schemas.v2.manifest import ManifestV2
 
 
-def test_Manifest_jsonschema():
+def test_ManifestV2_jsonschema():
     """
     Generate a JSON Schema from the ManifestV2 Pydantic model, and compare
     it with the one currently present in the repository.
From 842b38fb5b91a5e57083ecfc277a81de827f4187 Mon Sep 17 00:00:00 2001 From: Tommaso Comparin <3862206+tcompa@users.noreply.github.com> Date: Mon, 1 Dec 2025 10:34:02 +0100 Subject: [PATCH 12/13] fix test --- tests/v2/test_01_schemas/test_schemas_manifest.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/v2/test_01_schemas/test_schemas_manifest.py b/tests/v2/test_01_schemas/test_schemas_manifest.py index 9bb8ab19e0..aaa2b6f3a9 100644 --- a/tests/v2/test_01_schemas/test_schemas_manifest.py +++ b/tests/v2/test_01_schemas/test_schemas_manifest.py @@ -145,7 +145,7 @@ def test_ManifestV2(): non_parallel_schema, ], ) - assert "ManifestV2 has has_args_schemas=True" in msg(e) + assert "Manifest has has_args_schemas=True" in msg(e) # 3: compound_just_parallel_schemas with pytest.raises(ValidationError) as e: @@ -158,7 +158,7 @@ def test_ManifestV2(): non_parallel_schema, ], ) - assert "ManifestV2 has has_args_schemas=True" in msg(e) + assert "Manifest has has_args_schemas=True" in msg(e) # 4: compound_no_schemas with pytest.raises(ValidationError) as e: @@ -171,7 +171,7 @@ def test_ManifestV2(): non_parallel_schema, ], ) - assert "ManifestV2 has has_args_schemas=True" in msg(e) + assert "Manifest has has_args_schemas=True" in msg(e) # 5: parallel_no_schema with pytest.raises(ValidationError) as e: @@ -184,7 +184,7 @@ def test_ManifestV2(): non_parallel_schema, ], ) - assert "ManifestV2 has has_args_schemas=True" in msg(e) + assert "Manifest has has_args_schemas=True" in msg(e) # 6: non_parallel_no_schema with pytest.raises(ValidationError) as e: @@ -197,7 +197,7 @@ def test_ManifestV2(): non_parallel_no_schema, ], ) - assert "ManifestV2 has has_args_schemas=True" in msg(e) + assert "Manifest has has_args_schemas=True" in msg(e) # 7: Non-unique task names with pytest.raises(ValidationError) as e: From f9ad128bc05e6afa4b21b38da56b6bc38110a6c5 Mon Sep 17 00:00:00 2001 From: Tommaso Comparin <3862206+tcompa@users.noreply.github.com> Date: Mon, 1 Dec 2025 10:36:09 +0100 Subject: [PATCH 13/13] CHANGELOG [skip ci] --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9a384297ae..cdafb08760 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -26,6 +26,8 @@ The main contents of this release are the introduction of the project sharing an * Settings: * Drop `DataSettings` (\#3031). * Reduce API logging level for some endpoints (\#3010). +* Internal: + * Remove the "V2" label from names of internal schemas and API route tags (\#3037). * Testing: * Expand SLURM-batching-heuristics test (\#3011). * Dependencies: