8 changes: 4 additions & 4 deletions .github/workflows/ci.yml
@@ -12,7 +12,7 @@ jobs:

tests_api:

name: "V2 - API - Python ${{ matrix.python-version }}"
name: "API - Python ${{ matrix.python-version }}"
runs-on: ubuntu-24.04
timeout-minutes: 30

@@ -61,7 +61,7 @@ jobs:

tests_not_api:

name: "V2 - not API - Python ${{ matrix.python-version }}"
name: "Not API - Python ${{ matrix.python-version }}"
runs-on: ubuntu-24.04
timeout-minutes: 30

@@ -110,7 +110,7 @@ jobs:

tests_containers:

name: "V2 - Containers - Python ${{ matrix.python-version }}"
name: "Containers - Python ${{ matrix.python-version }}"
runs-on: ubuntu-24.04
timeout-minutes: 30

@@ -171,7 +171,7 @@ jobs:

tests_oauth:

name: "V2 - OAuth - Python ${{ matrix.python-version }}"
name: "OAuth - Python ${{ matrix.python-version }}"
runs-on: ubuntu-24.04
timeout-minutes: 30

68 changes: 32 additions & 36 deletions benchmarks/populate_db/populate_db_script.py
@@ -1,9 +1,9 @@
from fractal_server.app.schemas.user import UserCreate
-from fractal_server.app.schemas.v2 import DatasetImportV2
-from fractal_server.app.schemas.v2 import JobCreateV2
-from fractal_server.app.schemas.v2 import ProjectCreateV2
-from fractal_server.app.schemas.v2 import WorkflowCreateV2
-from fractal_server.app.schemas.v2 import WorkflowTaskCreateV2
+from fractal_server.app.schemas.v2 import DatasetImport
+from fractal_server.app.schemas.v2 import JobCreate
+from fractal_server.app.schemas.v2 import ProjectCreate
+from fractal_server.app.schemas.v2 import WorkflowCreate
+from fractal_server.app.schemas.v2 import WorkflowTaskCreate
from scripts.client import FractalClient


@@ -61,19 +61,17 @@ def _user_flow_vanilla(
working_task_id: int,
):
user = _create_user_client(admin, user_identifier="vanilla")
proj = user.add_project(ProjectCreateV2(name="MyProject_uv"))
proj = user.add_project(ProjectCreate(name="MyProject_uv"))
image_list = create_image_list(n_images=10)
ds = user.import_dataset(
proj.id,
-DatasetImportV2(
+DatasetImport(
name="MyDataset", zarr_dir="/invalid/zarr", images=image_list
),
)
wf = user.add_workflow(proj.id, WorkflowCreateV2(name="MyWorkflow"))
user.add_workflowtask(
proj.id, wf.id, working_task_id, WorkflowTaskCreateV2()
)
user.submit_job(proj.id, wf.id, ds.id, applyworkflow=JobCreateV2())
wf = user.add_workflow(proj.id, WorkflowCreate(name="MyWorkflow"))
user.add_workflowtask(proj.id, wf.id, working_task_id, WorkflowTaskCreate())
user.submit_job(proj.id, wf.id, ds.id, applyworkflow=JobCreate())


# power user:
@@ -88,29 +86,29 @@ def _user_flow_power(
failing_task_id: int,
):
user = _create_user_client(admin, user_identifier="power")
proj = user.add_project(ProjectCreateV2(name="MyProject_upw"))
proj = user.add_project(ProjectCreate(name="MyProject_upw"))
# we add also a dataset with images
image_list = create_image_list(n_images=100)
num_workflows = 20
num_jobs_per_workflow = 20
for ind_wf in range(num_workflows):
wf = user.add_workflow(
proj.id, WorkflowCreateV2(name=f"MyWorkflow-{ind_wf}")
proj.id, WorkflowCreate(name=f"MyWorkflow-{ind_wf}")
)
user.add_workflowtask(
-proj.id, wf.id, working_task_id, WorkflowTaskCreateV2()
+proj.id, wf.id, working_task_id, WorkflowTaskCreate()
)
if ind_wf % 2 == 0:
user.add_workflowtask(
-proj.id, wf.id, working_task_id, WorkflowTaskCreateV2()
+proj.id, wf.id, working_task_id, WorkflowTaskCreate()
)
user.add_workflowtask(
-proj.id, wf.id, failing_task_id, WorkflowTaskCreateV2()
+proj.id, wf.id, failing_task_id, WorkflowTaskCreate()
)
for ind_job in range(num_jobs_per_workflow):
ds = user.import_dataset(
proj.id,
-DatasetImportV2(
+DatasetImport(
name="MyDataset",
zarr_dir="/invalid/zarr",
images=image_list,
@@ -120,7 +118,7 @@ def _user_flow_power(
proj.id,
wf.id,
ds.id,
-applyworkflow=JobCreateV2(),
+applyworkflow=JobCreate(),
)


@@ -134,14 +132,14 @@ def _user_flow_dataset(
working_task_id: int,
):
user = _create_user_client(admin, user_identifier="dataset")
proj = user.add_project(ProjectCreateV2(name="MyProject_us"))
proj = user.add_project(ProjectCreate(name="MyProject_us"))
image_list = create_image_list(n_images=1000)
n_datasets = 20
ds_list = []
for i in range(n_datasets):
ds = user.import_dataset(
proj.id,
-DatasetImportV2(
+DatasetImport(
name=f"MyDataset_us-{i}",
zarr_dir="/invalid/zarr",
images=image_list,
@@ -152,17 +150,17 @@ def _user_flow_dataset(
num_workflows = 20
for i in range(num_workflows):
wf = user.add_workflow(
proj.id, WorkflowCreateV2(name=f"MyWorkflow_us-{i}")
proj.id, WorkflowCreate(name=f"MyWorkflow_us-{i}")
)
user.add_workflowtask(
-proj.id, wf.id, working_task_id, WorkflowTaskCreateV2()
+proj.id, wf.id, working_task_id, WorkflowTaskCreate()
)
for ds in ds_list:
user.submit_job(
proj.id,
wf.id,
ds.id,
-applyworkflow=JobCreateV2(),
+applyworkflow=JobCreate(),
)


@@ -180,27 +178,27 @@ def _user_flow_project(
num_jobs_per_workflow = 5
image_list = create_image_list(100)
for i in range(n_projects):
proj = user.add_project(ProjectCreateV2(name=f"MyProject_upj-{i}"))
proj = user.add_project(ProjectCreate(name=f"MyProject_upj-{i}"))
ds = user.import_dataset(
proj.id,
-DatasetImportV2(
+DatasetImport(
name=f"MyDataset_up-{i}",
zarr_dir="/invalid/zarr",
images=image_list,
),
)
wf = user.add_workflow(
proj.id, WorkflowCreateV2(name=f"MyWorkflow_up-{i}")
proj.id, WorkflowCreate(name=f"MyWorkflow_up-{i}")
)
user.add_workflowtask(
-proj.id, wf.id, working_task_id, WorkflowTaskCreateV2()
+proj.id, wf.id, working_task_id, WorkflowTaskCreate()
)
for i in range(num_jobs_per_workflow):
user.submit_job(
proj.id,
wf.id,
ds.id,
-applyworkflow=JobCreateV2(),
+applyworkflow=JobCreate(),
)


@@ -214,21 +212,19 @@ def _user_flow_job(
working_task_id: int,
):
user = _create_user_client(admin, user_identifier="job")
proj = user.add_project(ProjectCreateV2(name="MyProject_uj"))
proj = user.add_project(ProjectCreate(name="MyProject_uj"))
image_list = create_image_list(n_images=10)
ds = user.import_dataset(
proj.id,
-DatasetImportV2(
+DatasetImport(
name="MyDataset", zarr_dir="/invalid/zarr", images=image_list
),
)
wf = user.add_workflow(proj.id, WorkflowCreateV2(name="MyWorkflow_uj"))
user.add_workflowtask(
proj.id, wf.id, working_task_id, WorkflowTaskCreateV2()
)
wf = user.add_workflow(proj.id, WorkflowCreate(name="MyWorkflow_uj"))
user.add_workflowtask(proj.id, wf.id, working_task_id, WorkflowTaskCreate())
num_jobs_per_workflow = 100
for i in range(num_jobs_per_workflow):
-user.submit_job(proj.id, wf.id, ds.id, applyworkflow=JobCreateV2())
+user.submit_job(proj.id, wf.id, ds.id, applyworkflow=JobCreate())


if __name__ == "__main__":
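The rename in this script is mechanical: every `*V2` schema class drops its suffix while keeping the same call signature. A minimal back-compat sketch (hypothetical, not part of this PR) showing how the old names could be kept as deprecated aliases during a transition:

# Hypothetical back-compat aliases; this PR does not add them.
from fractal_server.app.schemas.v2 import DatasetImport
from fractal_server.app.schemas.v2 import JobCreate
from fractal_server.app.schemas.v2 import ProjectCreate
from fractal_server.app.schemas.v2 import WorkflowCreate
from fractal_server.app.schemas.v2 import WorkflowTaskCreate

# Deprecated names kept as plain aliases for a transition period.
DatasetImportV2 = DatasetImport
JobCreateV2 = JobCreate
ProjectCreateV2 = ProjectCreate
WorkflowCreateV2 = WorkflowCreate
WorkflowTaskCreateV2 = WorkflowTaskCreate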
4 changes: 2 additions & 2 deletions fractal_server/app/models/v2/job.py
@@ -8,7 +8,7 @@
from sqlmodel import Field
from sqlmodel import SQLModel

-from fractal_server.app.schemas.v2 import JobStatusTypeV2
+from fractal_server.app.schemas.v2 import JobStatusType
from fractal_server.utils import get_timestamp


@@ -56,7 +56,7 @@ class JobV2(SQLModel, table=True):
end_timestamp: datetime | None = Field(
default=None, sa_column=Column(DateTime(timezone=True))
)
-status: str = JobStatusTypeV2.SUBMITTED
+status: str = JobStatusType.SUBMITTED
log: str | None = None
executor_error_log: str | None = None

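For the `status` default above, `JobStatusType` replaces `JobStatusTypeV2` with no change to the members used here. A sketch of the assumed enum shape; only `SUBMITTED` and `FAILED` appear in this diff, and the string values are assumptions inferred from the 'submitted'/'failed' literals in the admin routes below:

from enum import Enum

# Hypothetical reconstruction; other members and exact values are assumptions.
class JobStatusType(str, Enum):
    SUBMITTED = "submitted"
    FAILED = "failed"

# Because the enum subclasses str, a member compares equal to its value, so a
# plain-str column default like `status: str = JobStatusType.SUBMITTED` stays valid.
assert JobStatusType.SUBMITTED == "submitted"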
22 changes: 10 additions & 12 deletions fractal_server/app/routes/admin/v2/__init__.py
@@ -14,16 +14,14 @@
from .task_group import router as task_group_router
from .task_group_lifecycle import router as task_group_lifecycle_router

-router_admin_v2 = APIRouter()
+router_admin = APIRouter()

-router_admin_v2.include_router(accounting_router, prefix="/accounting")
-router_admin_v2.include_router(job_router, prefix="/job")
-router_admin_v2.include_router(task_router, prefix="/task")
-router_admin_v2.include_router(task_group_router, prefix="/task-group")
-router_admin_v2.include_router(
-task_group_lifecycle_router, prefix="/task-group"
-)
-router_admin_v2.include_router(impersonate_router, prefix="/impersonate")
-router_admin_v2.include_router(resource_router, prefix="/resource")
-router_admin_v2.include_router(profile_router, prefix="/profile")
-router_admin_v2.include_router(sharing_router, prefix="/linkuserproject")
+router_admin.include_router(accounting_router, prefix="/accounting")
+router_admin.include_router(job_router, prefix="/job")
+router_admin.include_router(task_router, prefix="/task")
+router_admin.include_router(task_group_router, prefix="/task-group")
+router_admin.include_router(task_group_lifecycle_router, prefix="/task-group")
+router_admin.include_router(impersonate_router, prefix="/impersonate")
+router_admin.include_router(resource_router, prefix="/resource")
+router_admin.include_router(profile_router, prefix="/profile")
+router_admin.include_router(sharing_router, prefix="/linkuserproject")
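With the router renamed from `router_admin_v2` to `router_admin`, the mounting side (outside this diff) only needs the new variable name. A hedged sketch, where the `/admin/v2` prefix is an assumption; the real prefix lives in fractal_server's app factory, which this PR leaves untouched:

from fastapi import FastAPI

from fractal_server.app.routes.admin.v2 import router_admin

app = FastAPI()
# Hypothetical mount point; adjust the prefix to match the app factory.
app.include_router(router_admin, prefix="/admin/v2")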
30 changes: 15 additions & 15 deletions fractal_server/app/routes/admin/v2/job.py
@@ -24,24 +24,24 @@
from fractal_server.app.routes.pagination import PaginationResponse
from fractal_server.app.routes.pagination import get_pagination_params
from fractal_server.app.schemas.v2 import HistoryUnitStatus
-from fractal_server.app.schemas.v2 import JobReadV2
-from fractal_server.app.schemas.v2 import JobStatusTypeV2
-from fractal_server.app.schemas.v2 import JobUpdateV2
+from fractal_server.app.schemas.v2 import JobRead
+from fractal_server.app.schemas.v2 import JobStatusType
+from fractal_server.app.schemas.v2 import JobUpdate
from fractal_server.runner.filenames import WORKFLOW_LOG_FILENAME
from fractal_server.utils import get_timestamp
from fractal_server.zip_tools import _zip_folder_to_byte_stream_iterator

router = APIRouter()


@router.get("/", response_model=PaginationResponse[JobReadV2])
@router.get("/", response_model=PaginationResponse[JobRead])
async def view_job(
id: int | None = None,
user_id: int | None = None,
project_id: int | None = None,
dataset_id: int | None = None,
workflow_id: int | None = None,
-status: JobStatusTypeV2 | None = None,
+status: JobStatusType | None = None,
start_timestamp_min: AwareDatetime | None = None,
start_timestamp_max: AwareDatetime | None = None,
end_timestamp_min: AwareDatetime | None = None,
@@ -50,7 +50,7 @@ async def view_job(
pagination: PaginationRequest = Depends(get_pagination_params),
user: UserOAuth = Depends(current_superuser_act),
db: AsyncSession = Depends(get_async_db),
-) -> PaginationResponse[JobReadV2]:
+) -> PaginationResponse[JobRead]:
"""
Query `JobV2` table.

@@ -154,13 +154,13 @@ async def view_job(
)


@router.get("/{job_id}/", response_model=JobReadV2)
@router.get("/{job_id}/", response_model=JobRead)
async def view_single_job(
job_id: int,
show_tmp_logs: bool = False,
user: UserOAuth = Depends(current_superuser_act),
db: AsyncSession = Depends(get_async_db),
-) -> JobReadV2:
+) -> JobRead:
job = await db.get(JobV2, job_id)
if not job:
raise HTTPException(
@@ -169,7 +169,7 @@ async def view_single_job(
)
await db.close()

-if show_tmp_logs and (job.status == JobStatusTypeV2.SUBMITTED):
+if show_tmp_logs and (job.status == JobStatusType.SUBMITTED):
try:
with open(f"{job.working_dir}/{WORKFLOW_LOG_FILENAME}") as f:
job.log = f.read()
@@ -179,13 +179,13 @@ async def update_job(
return job


@router.patch("/{job_id}/", response_model=JobReadV2)
@router.patch("/{job_id}/", response_model=JobRead)
async def update_job(
-job_update: JobUpdateV2,
+job_update: JobUpdate,
job_id: int,
user: UserOAuth = Depends(current_superuser_act),
db: AsyncSession = Depends(get_async_db),
-) -> JobReadV2 | None:
+) -> JobRead | None:
"""
Change the status of an existing job.

@@ -198,13 +198,13 @@ async def update_job(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Job {job_id} not found",
)
-if job.status != JobStatusTypeV2.SUBMITTED:
+if job.status != JobStatusType.SUBMITTED:
raise HTTPException(
status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
detail=f"Job {job_id} has status {job.status=} != 'submitted'.",
)

-if job_update.status != JobStatusTypeV2.FAILED:
+if job_update.status != JobStatusType.FAILED:
raise HTTPException(
status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
detail=f"Cannot set job status to {job_update.status}",
@@ -217,7 +217,7 @@
job,
"log",
f"{job.log or ''}\nThis job was manually marked as "
f"'{JobStatusTypeV2.FAILED}' by an admin ({timestamp.isoformat()}).",
f"'{JobStatusType.FAILED}' by an admin ({timestamp.isoformat()}).",
)

res = await db.execute(
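As the two guards above show, `update_job` only accepts moving a `submitted` job to `failed`. A client-side usage sketch; the base URL, job id, token, and exact route prefix are placeholders, not confirmed by this diff:

import httpx

# Placeholders: adjust base URL, job id, and credentials to your deployment.
BASE_URL = "https://fractal.example.org/admin/v2"
JOB_ID = 42

response = httpx.patch(
    f"{BASE_URL}/job/{JOB_ID}/",
    json={"status": "failed"},  # the only transition update_job accepts
    headers={"Authorization": "Bearer <admin-token>"},
)
response.raise_for_status()
print(response.json()["status"])  # expected: "failed"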