diff --git a/pyproject.toml b/pyproject.toml index d2b59cb59..ce9faac27 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -151,7 +151,9 @@ ignore = [ # too-many-return-statements "PLR0911", # flake8-type-checking (150 errors) - "TC" + "TC", + # PT019: Arguments starting with underscore in function definitions starting with `test_` (e.g., test_connectivity in event_resource_api.py) + "PT019" ] [tool.ruff.lint.isort] @@ -215,20 +217,10 @@ plugins = [ "pydantic.mypy" ] disable_error_code = [ - "no-redef", - "return-value", - "var-annotated", "assignment", - "call-arg", "arg-type", - "override", - "dict-item", - "index", - "operator", - "call-overload", "misc", "attr-defined", - "union-attr", "name-defined", ] diff --git a/src/conductor/asyncio_client/adapters/api/admin_resource_api.py b/src/conductor/asyncio_client/adapters/api/admin_resource_api.py index 16af873fe..176f7a4c9 100644 --- a/src/conductor/asyncio_client/adapters/api/admin_resource_api.py +++ b/src/conductor/asyncio_client/adapters/api/admin_resource_api.py @@ -1,4 +1,135 @@ +from __future__ import annotations + +from typing import Annotated, Any, Dict, List, Optional, Tuple, Union + +from pydantic import Field, StrictFloat, StrictInt, StrictStr + +from conductor.asyncio_client.adapters import ApiClient +from conductor.asyncio_client.adapters.models.task_adapter import TaskAdapter +from conductor.asyncio_client.adapters.utils import convert_list_to_adapter from conductor.asyncio_client.http.api import AdminResourceApi -class AdminResourceApiAdapter(AdminResourceApi): ... +class AdminResourceApiAdapter: + """Adapter for AdminResourceApi that converts between generated models and adapters.""" + + def __init__(self, api_client: ApiClient): + self._api = AdminResourceApi(api_client) + + async def clear_task_execution_cache( + self, + task_def_name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Clear the task execution cache""" + await self._api.clear_task_execution_cache( + task_def_name, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def get_redis_usage( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Dict[str, object]: + """Get the Redis usage""" + return await self._api.get_redis_usage( + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def requeue_sweep( + self, + workflow_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + 
_host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Requeue sweep""" + return await self._api.requeue_sweep( + workflow_id, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def verify_and_repair_workflow_consistency( + self, + workflow_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Verify and repair workflow consistency""" + return await self._api.verify_and_repair_workflow_consistency( + workflow_id, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def view( + self, + tasktype: StrictStr, + start: Optional[StrictInt] = None, + count: Optional[StrictInt] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> List[TaskAdapter]: + """View tasks""" + result = await self._api.view( + tasktype, + start, + count, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + return convert_list_to_adapter(result, TaskAdapter) diff --git a/src/conductor/asyncio_client/adapters/api/application_resource_api.py b/src/conductor/asyncio_client/adapters/api/application_resource_api.py index 47ff4ba24..19050f776 100644 --- a/src/conductor/asyncio_client/adapters/api/application_resource_api.py +++ b/src/conductor/asyncio_client/adapters/api/application_resource_api.py @@ -4,6 +4,7 @@ from pydantic import Field, StrictFloat, StrictInt, StrictStr +from conductor.asyncio_client.adapters import ApiClient from conductor.asyncio_client.adapters.models.create_or_update_application_request_adapter import ( CreateOrUpdateApplicationRequestAdapter, ) @@ -11,10 +12,16 @@ ExtendedConductorApplicationAdapter, ) from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter +from conductor.asyncio_client.adapters.utils import convert_list_to_adapter from conductor.asyncio_client.http.api import ApplicationResourceApi -class ApplicationResourceApiAdapter(ApplicationResourceApi): +class ApplicationResourceApiAdapter: + """Adapter for ApplicationResourceApi that converts between generated models and adapters.""" + + def __init__(self, api_client: ApiClient): + self._api = ApplicationResourceApi(api_client) + async def create_access_key( self, id: StrictStr, @@ -27,11 +34,12 @@ async def create_access_key( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ): - if not id: - id = None - return await super().create_access_key( - id, + ) -> object: + """Create an access key""" + normalized_id: Optional[StrictStr] = id or None + + return await self._api.create_access_key( + normalized_id, 
_request_timeout=_request_timeout, _request_auth=_request_auth, _content_type=_content_type, @@ -52,14 +60,14 @@ async def add_role_to_application_user( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ): - if not application_id: - application_id = None - if not role: - role = None - return await super().add_role_to_application_user( - application_id, - role, + ) -> object: + """Add a role to an application user""" + normalized_application_id: Optional[StrictStr] = application_id or None + normalized_role: Optional[StrictStr] = role or None + + return await self._api.add_role_to_application_user( + normalized_application_id, + normalized_role, _request_timeout=_request_timeout, _request_auth=_request_auth, _content_type=_content_type, @@ -80,14 +88,14 @@ async def delete_access_key( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ): - if not application_id: - application_id = None - if not key_id: - key_id = None - return await super().delete_access_key( - application_id, - key_id, + ) -> object: + """Delete an access key""" + normalized_application_id: Optional[StrictStr] = application_id or None + normalized_key_id: Optional[StrictStr] = key_id or None + + return await self._api.delete_access_key( + normalized_application_id, + normalized_key_id, _request_timeout=_request_timeout, _request_auth=_request_auth, _content_type=_content_type, @@ -108,14 +116,14 @@ async def remove_role_from_application_user( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ): - if not application_id: - application_id = None - if not role: - role = None - return await super().remove_role_from_application_user( - application_id, - role, + ) -> object: + """Remove role from application user""" + normalized_application_id: Optional[StrictStr] = application_id or None + normalized_role: Optional[StrictStr] = role or None + + return await self._api.remove_role_from_application_user( + normalized_application_id, + normalized_role, _request_timeout=_request_timeout, _request_auth=_request_auth, _content_type=_content_type, @@ -135,18 +143,18 @@ async def get_app_by_access_key_id( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> Optional[ExtendedConductorApplicationAdapter]: - if not access_key_id: - access_key_id = None - result = await super().get_app_by_access_key_id( - access_key_id, + ) -> object: + """Get application by access key_id""" + normalized_access_key_id: Optional[StrictStr] = access_key_id or None + + return await self._api.get_app_by_access_key_id( + normalized_access_key_id, _request_timeout=_request_timeout, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, _host_index=_host_index, ) - return result async def get_access_keys( self, @@ -160,11 +168,12 @@ async def get_access_keys( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ): - if not id: - id = None - return await super().get_access_keys( - id, + ) -> object: + """Get access keys for an application""" + normalized_id: Optional[StrictStr] = id or None + + return await self._api.get_access_keys( + 
normalized_id, _request_timeout=_request_timeout, _request_auth=_request_auth, _content_type=_content_type, @@ -185,14 +194,14 @@ async def toggle_access_key_status( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ): - if not application_id: - application_id = None - if not key_id: - key_id = None - return await super().toggle_access_key_status( - application_id, - key_id, + ) -> object: + """Toggle access key status""" + normalized_application_id: Optional[StrictStr] = application_id or None + normalized_key_id: Optional[StrictStr] = key_id or None + + return await self._api.toggle_access_key_status( + normalized_application_id, + normalized_key_id, _request_timeout=_request_timeout, _request_auth=_request_auth, _content_type=_content_type, @@ -213,17 +222,18 @@ async def get_tags_for_application( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> List[TagAdapter]: - if not id: - id = None - result = await super().get_tags_for_application( - id, + """Get tags for an application""" + normalized_id: Optional[StrictStr] = id or None + + result = await self._api.get_tags_for_application( + normalized_id, _request_timeout=_request_timeout, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, _host_index=_host_index, ) - return result + return convert_list_to_adapter(result, TagAdapter) async def put_tag_for_application( self, @@ -239,13 +249,13 @@ async def put_tag_for_application( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> None: - if not id: - id = None - if not tag: - tag = None - return await super().put_tag_for_application( - id, - tag, + """Put tag for an application""" + normalized_id: Optional[StrictStr] = id or None + normalized_tag: Optional[List[TagAdapter]] = tag or None + + await self._api.put_tag_for_application( + normalized_id, + normalized_tag, _request_timeout=_request_timeout, _request_auth=_request_auth, _content_type=_content_type, @@ -267,13 +277,13 @@ async def delete_tag_for_application( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> None: - if not id: - id = None - if not tag: - tag = None - return await super().delete_tag_for_application( - id, - tag, + """Delete tag for an application""" + normalized_id: Optional[StrictStr] = id or None + normalized_tag: Optional[List[TagAdapter]] = tag or None + + return await self._api.delete_tag_for_application( + normalized_id, + normalized_tag, _request_timeout=_request_timeout, _request_auth=_request_auth, _content_type=_content_type, @@ -293,8 +303,9 @@ async def create_application( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ExtendedConductorApplicationAdapter: - result = await super().create_application( + ) -> object: + """Create an application""" + return await self._api.create_application( create_or_update_application_request, _request_timeout=_request_timeout, _request_auth=_request_auth, @@ -302,7 +313,6 @@ async def create_application( _headers=_headers, _host_index=_host_index, ) - return result async def update_application( self, @@ -317,8 +327,9 @@ async def update_application( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: 
Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ExtendedConductorApplicationAdapter: - result = await super().update_application( + ) -> object: + """Update an application""" + return await self._api.update_application( id, create_or_update_application_request, _request_timeout=_request_timeout, @@ -327,7 +338,6 @@ async def update_application( _headers=_headers, _host_index=_host_index, ) - return result async def get_application( self, @@ -341,8 +351,9 @@ async def get_application( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ExtendedConductorApplicationAdapter: - result = await super().get_application( + ) -> object: + """Get an application""" + return await self._api.get_application( id, _request_timeout=_request_timeout, _request_auth=_request_auth, @@ -350,7 +361,6 @@ async def get_application( _headers=_headers, _host_index=_host_index, ) - return result async def list_applications( self, @@ -364,11 +374,35 @@ async def list_applications( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> List[ExtendedConductorApplicationAdapter]: - result = await super().list_applications( + """List applications""" + result = await self._api.list_applications( + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + return convert_list_to_adapter(result, ExtendedConductorApplicationAdapter) + + async def delete_application( + self, + id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """Delete an application""" + return await self._api.delete_application( + id, _request_timeout=_request_timeout, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, _host_index=_host_index, ) - return result diff --git a/src/conductor/asyncio_client/adapters/api/authorization_resource_api.py b/src/conductor/asyncio_client/adapters/api/authorization_resource_api.py index 872a72800..86c31fa3b 100644 --- a/src/conductor/asyncio_client/adapters/api/authorization_resource_api.py +++ b/src/conductor/asyncio_client/adapters/api/authorization_resource_api.py @@ -1,4 +1,65 @@ +from __future__ import annotations + +from typing import Annotated, Any, Dict, Optional, Tuple, Union + +from pydantic import Field, StrictFloat, StrictInt, StrictStr + +from conductor.asyncio_client.adapters import ApiClient +from conductor.asyncio_client.adapters.models.authorization_request_adapter import ( + AuthorizationRequestAdapter, +) from conductor.asyncio_client.http.api import AuthorizationResourceApi -class AuthorizationResourceApiAdapter(AuthorizationResourceApi): ... 
+class AuthorizationResourceApiAdapter: + def __init__(self, api_client: ApiClient): + self._api = AuthorizationResourceApi(api_client) + + async def get_permissions( + self, + type: StrictStr, + id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """Get permissions""" + return await self._api.get_permissions( + type, + id, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def grant_permissions( + self, + authorization_request: AuthorizationRequestAdapter, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """Grant permissions""" + return await self._api.grant_permissions( + authorization_request, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def remove_permissions( + self, + authorization_request: AuthorizationRequestAdapter, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """Remove permissions""" + return await self._api.remove_permissions( + authorization_request, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) diff --git a/src/conductor/asyncio_client/adapters/api/environment_resource_api.py b/src/conductor/asyncio_client/adapters/api/environment_resource_api.py index 892b50b51..c3a8ed220 100644 --- a/src/conductor/asyncio_client/adapters/api/environment_resource_api.py +++ b/src/conductor/asyncio_client/adapters/api/environment_resource_api.py @@ -1,4 +1,186 @@ +from __future__ import annotations + +from typing import Annotated, Any, Dict, List, Optional, Tuple, Union + +from pydantic import Field, StrictFloat, StrictInt, StrictStr + +from conductor.asyncio_client.adapters import ApiClient +from conductor.asyncio_client.adapters.models.environment_variable_adapter import ( + EnvironmentVariableAdapter, +) +from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter +from conductor.asyncio_client.adapters.utils import convert_list_to_adapter from conductor.asyncio_client.http.api import EnvironmentResourceApi -class EnvironmentResourceApiAdapter(EnvironmentResourceApi): ... 
+class EnvironmentResourceApiAdapter: + def __init__(self, api_client: ApiClient): + self._api = EnvironmentResourceApi(api_client) + + async def create_or_update_env_variable( + self, + key: StrictStr, + body: Annotated[str, Field(min_length=0, strict=True, max_length=65535)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Create or update environment variable""" + await self._api.create_or_update_env_variable( + key, + body, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def delete_env_variable( + self, + key: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Delete environment variable""" + return await self._api.delete_env_variable( + key, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def delete_tag_for_env_var( + self, + name: StrictStr, + tag: List[TagAdapter], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Delete tag for environment variable""" + await self._api.delete_tag_for_env_var( + name, + tag, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def get2( + self, + key: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Get environment variable by key""" + return await self._api.get2( + key, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def get_all( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> List[EnvironmentVariableAdapter]: + """Get all environment variables""" + result = await 
self._api.get_all( + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + return convert_list_to_adapter(result, EnvironmentVariableAdapter) + + async def get_tags_for_env_var( + self, + name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> List[TagAdapter]: + """Get tags for environment variables""" + result = await self._api.get_tags_for_env_var( + name, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + return convert_list_to_adapter(result, TagAdapter) + + async def put_tag_for_env_var( + self, + name: StrictStr, + tag: List[TagAdapter], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Put tag for environment variable""" + await self._api.put_tag_for_env_var( + name, + tag, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) diff --git a/src/conductor/asyncio_client/adapters/api/event_execution_resource_api.py b/src/conductor/asyncio_client/adapters/api/event_execution_resource_api.py index 06bcd9c12..ddb9fc5ed 100644 --- a/src/conductor/asyncio_client/adapters/api/event_execution_resource_api.py +++ b/src/conductor/asyncio_client/adapters/api/event_execution_resource_api.py @@ -1,4 +1,70 @@ +from __future__ import annotations + +from typing import Annotated, Any, Dict, List, Optional, Tuple, Union + +from pydantic import Field, StrictFloat, StrictInt, StrictStr + +from conductor.asyncio_client.adapters import ApiClient +from conductor.asyncio_client.adapters.models.extended_event_execution_adapter import ( + ExtendedEventExecutionAdapter, +) +from conductor.asyncio_client.adapters.models.search_result_handled_event_response_adapter import ( + SearchResultHandledEventResponseAdapter, +) +from conductor.asyncio_client.adapters.utils import convert_list_to_adapter, convert_to_adapter from conductor.asyncio_client.http.api import EventExecutionResourceApi -class EventExecutionResourceApiAdapter(EventExecutionResourceApi): ... 
+class EventExecutionResourceApiAdapter: + def __init__(self, api_client: ApiClient): + self._api = EventExecutionResourceApi(api_client) + + async def get_event_handlers_for_event1( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> SearchResultHandledEventResponseAdapter: + """Get All active Event Handlers for the last 24 hours""" + result = await self._api.get_event_handlers_for_event1( + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + return convert_to_adapter(result, SearchResultHandledEventResponseAdapter) + + async def get_event_handlers_for_event2( + self, + event: StrictStr, + var_from: StrictInt, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> List[ExtendedEventExecutionAdapter]: + """Get event handlers for a given event""" + result = await self._api.get_event_handlers_for_event2( + event=event, + var_from=var_from, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + return convert_list_to_adapter(result, ExtendedEventExecutionAdapter) diff --git a/src/conductor/asyncio_client/adapters/api/event_resource_api.py b/src/conductor/asyncio_client/adapters/api/event_resource_api.py index 24f6f70d7..7096a1487 100644 --- a/src/conductor/asyncio_client/adapters/api/event_resource_api.py +++ b/src/conductor/asyncio_client/adapters/api/event_resource_api.py @@ -1,4 +1,404 @@ +from __future__ import annotations + +from typing import Annotated, Any, Dict, List, Optional, Tuple, Union + +from pydantic import Field, StrictBool, StrictFloat, StrictInt, StrictStr + +from conductor.asyncio_client.adapters import ApiClient +from conductor.asyncio_client.adapters.models.connectivity_test_input_adapter import ( + ConnectivityTestInputAdapter, +) +from conductor.asyncio_client.adapters.models.connectivity_test_result_adapter import ( + ConnectivityTestResultAdapter, +) +from conductor.asyncio_client.adapters.models.event_handler_adapter import EventHandlerAdapter +from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter +from conductor.asyncio_client.adapters.utils import convert_list_to_adapter, convert_to_adapter from conductor.asyncio_client.http.api import EventResourceApi -class EventResourceApiAdapter(EventResourceApi): ... 
+class EventResourceApiAdapter: + def __init__(self, api_client: ApiClient): + self._api = EventResourceApi(api_client) + + async def add_event_handler( + self, + event_handler: List[EventHandlerAdapter], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Add a new event handler""" + await self._api.add_event_handler( + event_handler, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def delete_queue_config( + self, + queue_type: StrictStr, + queue_name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Delete queue config""" + await self._api.delete_queue_config( + queue_type, + queue_name, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def delete_tag_for_event_handler( + self, + name: StrictStr, + tag: List[TagAdapter], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Delete a tag for event handler""" + await self._api.delete_tag_for_event_handler( + name, + tag, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def get_event_handler_by_name( + self, + name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> EventHandlerAdapter: + """Get event handler by name""" + result = await self._api.get_event_handler_by_name( + name, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + return convert_to_adapter(result, EventHandlerAdapter) + + async def get_event_handlers( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> List[EventHandlerAdapter]: + 
"""Get all event handlers""" + result = await self._api.get_event_handlers( + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + return convert_list_to_adapter(result, EventHandlerAdapter) + + async def get_event_handlers_for_event( + self, + event: StrictStr, + active_only: Optional[StrictBool] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> List[EventHandlerAdapter]: + """Get event handlers for a given event""" + result = await self._api.get_event_handlers_for_event( + event, + active_only, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + return convert_list_to_adapter(result, EventHandlerAdapter) + + async def get_queue_config( + self, + queue_type: StrictStr, + queue_name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Dict[str, object]: + """Get queue config""" + return await self._api.get_queue_config( + queue_type, + queue_name, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def get_queue_names( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Dict[str, str]: + """Get all queue configs""" + return await self._api.get_queue_names( + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def get_tags_for_event_handler( + self, + name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> List[TagAdapter]: + """Get tags for event handler""" + result = await self._api.get_tags_for_event_handler( + name, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + return convert_list_to_adapter(result, TagAdapter) + + async def handle_incoming_event( + self, + request_body: Dict[str, Dict[str, Any]], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], 
Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Handle an incoming event""" + await self._api.handle_incoming_event( + request_body, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def put_queue_config( + self, + queue_type: StrictStr, + queue_name: StrictStr, + body: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """(Deprecated) Put queue config""" + await self._api.put_queue_config( + queue_type, + queue_name, + body, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def put_tag_for_event_handler( + self, + name: StrictStr, + tag: List[TagAdapter], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Put a tag for event handler""" + await self._api.put_tag_for_event_handler( + name, + tag, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def remove_event_handler_status( + self, + name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Remove an event handler""" + await self._api.remove_event_handler_status( + name, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def test( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> EventHandlerAdapter: + """Get event handler by name""" + result = await self._api.test( + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + return convert_to_adapter(result, EventHandlerAdapter) + + async def test_connectivity( + self, + connectivity_test_input: ConnectivityTestInputAdapter, + _request_timeout: Union[ + 
None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ConnectivityTestResultAdapter: + """Test connectivity for a given queue using a workflow with EVENT task and an EventHandler""" + result = await self._api.test_connectivity( + connectivity_test_input, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + return convert_to_adapter(result, ConnectivityTestResultAdapter) + + async def update_event_handler( + self, + event_handler: EventHandlerAdapter, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Update an existing event handler""" + await self._api.update_event_handler( + event_handler, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) diff --git a/src/conductor/asyncio_client/adapters/api/group_resource_api.py b/src/conductor/asyncio_client/adapters/api/group_resource_api.py index 0f07e506d..a94677123 100644 --- a/src/conductor/asyncio_client/adapters/api/group_resource_api.py +++ b/src/conductor/asyncio_client/adapters/api/group_resource_api.py @@ -4,14 +4,24 @@ from pydantic import Field, StrictFloat, StrictInt, StrictStr +from conductor.asyncio_client.adapters import ApiClient +from conductor.asyncio_client.adapters.models.granted_access_response_adapter import ( + GrantedAccessResponseAdapter, +) from conductor.asyncio_client.adapters.models.group_adapter import GroupAdapter from conductor.asyncio_client.adapters.models.upsert_group_request_adapter import ( UpsertGroupRequestAdapter, ) +from conductor.asyncio_client.adapters.utils import convert_list_to_adapter, convert_to_adapter from conductor.asyncio_client.http.api import GroupResourceApi -class GroupResourceApiAdapter(GroupResourceApi): +class GroupResourceApiAdapter: + """Adapter for GroupResourceApi that converts between generated models and adapters.""" + + def __init__(self, api_client: ApiClient): + self._api = GroupResourceApi(api_client) + async def list_groups( self, _request_timeout: Union[ @@ -24,14 +34,15 @@ async def list_groups( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> List[GroupAdapter]: - result = await super().list_groups( + """List all groups, returning GroupAdapter instances.""" + result = await self._api.list_groups( _request_timeout=_request_timeout, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, _host_index=_host_index, ) - return result + return convert_list_to_adapter(result, GroupAdapter) async def get_group( self, @@ -45,8 +56,9 @@ async def get_group( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> GroupAdapter: - result = await super().get_group( + ) -> 
object: + """Get a group by ID, returning a GroupAdapter instance.""" + return await self._api.get_group( id, _request_timeout=_request_timeout, _request_auth=_request_auth, @@ -54,7 +66,6 @@ async def get_group( _headers=_headers, _host_index=_host_index, ) - return result async def upsert_group( self, @@ -69,8 +80,9 @@ async def upsert_group( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> GroupAdapter: - result = await super().upsert_group( + ) -> object: + """Create or update a group, returning a GroupAdapter instance.""" + return await self._api.upsert_group( id, upsert_group_request, _request_timeout=_request_timeout, @@ -80,4 +92,172 @@ async def upsert_group( _host_index=_host_index, ) - return result + async def delete_group( + self, + id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """Delete a group by ID.""" + return await self._api.delete_group( + id, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def add_user_to_group( + self, + group_id: StrictStr, + user_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """Add a user to a group.""" + return await self._api.add_user_to_group( + group_id, + user_id, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def remove_user_from_group( + self, + group_id: StrictStr, + user_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """Remove a user from a group.""" + return await self._api.remove_user_from_group( + group_id, + user_id, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def add_users_to_group( + self, + group_id: StrictStr, + request_body: List[StrictStr], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Add multiple users to a group.""" + await self._api.add_users_to_group( + 
group_id, + request_body, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def remove_users_from_group( + self, + group_id: StrictStr, + request_body: List[StrictStr], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Remove multiple users from a group.""" + await self._api.remove_users_from_group( + group_id, + request_body, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def get_users_in_group( + self, + id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """Get users in a group.""" + return await self._api.get_users_in_group( + id, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def get_granted_permissions1( + self, + group_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> GrantedAccessResponseAdapter: + """Get granted permissions for a group.""" + result = await self._api.get_granted_permissions1( + group_id, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + return convert_to_adapter(result, GrantedAccessResponseAdapter) diff --git a/src/conductor/asyncio_client/adapters/api/health_check_resource_api.py b/src/conductor/asyncio_client/adapters/api/health_check_resource_api.py index f44cde8db..c9ef2c156 100644 --- a/src/conductor/asyncio_client/adapters/api/health_check_resource_api.py +++ b/src/conductor/asyncio_client/adapters/api/health_check_resource_api.py @@ -1,4 +1,34 @@ +from __future__ import annotations + +from typing import Annotated, Any, Dict, Optional, Tuple, Union + +from pydantic import Field, StrictFloat, StrictInt, StrictStr + +from conductor.asyncio_client.adapters import ApiClient from conductor.asyncio_client.http.api import HealthCheckResourceApi -class HealthCheckResourceApiAdapter(HealthCheckResourceApi): ... 
+class HealthCheckResourceApiAdapter: + def __init__(self, api_client: ApiClient): + self._api = HealthCheckResourceApi(api_client) + + async def do_check( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Dict[str, object]: + """Check the health of the API""" + return await self._api.do_check( + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) diff --git a/src/conductor/asyncio_client/adapters/api/incoming_webhook_resource_api.py b/src/conductor/asyncio_client/adapters/api/incoming_webhook_resource_api.py index 4a91fcef6..231324c20 100644 --- a/src/conductor/asyncio_client/adapters/api/incoming_webhook_resource_api.py +++ b/src/conductor/asyncio_client/adapters/api/incoming_webhook_resource_api.py @@ -1,4 +1,65 @@ +from __future__ import annotations + +from typing import Annotated, Any, Dict, Optional, Tuple, Union + +from pydantic import Field, StrictFloat, StrictInt, StrictStr + +from conductor.asyncio_client.adapters import ApiClient from conductor.asyncio_client.http.api import IncomingWebhookResourceApi -class IncomingWebhookResourceApiAdapter(IncomingWebhookResourceApi): ... +class IncomingWebhookResourceApiAdapter: + def __init__(self, api_client: ApiClient): + self._api = IncomingWebhookResourceApi(api_client) + + async def handle_webhook( + self, + id: StrictStr, + request_params: Dict[str, Dict[str, Any]], + body: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Handle webhook""" + return await self._api.handle_webhook( + id, + request_params, + body, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def handle_webhook1( + self, + id: StrictStr, + request_params: Dict[str, Dict[str, Any]], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Handle webhook""" + return await self._api.handle_webhook1( + id, + request_params, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) diff --git a/src/conductor/asyncio_client/adapters/api/integration_resource_api.py b/src/conductor/asyncio_client/adapters/api/integration_resource_api.py index eb8b3785a..d2511fda7 100644 --- a/src/conductor/asyncio_client/adapters/api/integration_resource_api.py +++ b/src/conductor/asyncio_client/adapters/api/integration_resource_api.py @@ -4,16 +4,102 @@ from pydantic 
import Field, StrictBool, StrictFloat, StrictInt, StrictStr +from conductor.asyncio_client.adapters import ApiClient from conductor.asyncio_client.adapters.models.event_log_adapter import EventLogAdapter from conductor.asyncio_client.adapters.models.integration_adapter import IntegrationAdapter from conductor.asyncio_client.adapters.models.integration_api_adapter import IntegrationApiAdapter +from conductor.asyncio_client.adapters.models.integration_api_update_adapter import ( + IntegrationApiUpdateAdapter, +) from conductor.asyncio_client.adapters.models.integration_def_adapter import IntegrationDefAdapter +from conductor.asyncio_client.adapters.models.integration_update_adapter import ( + IntegrationUpdateAdapter, +) from conductor.asyncio_client.adapters.models.message_template_adapter import MessageTemplateAdapter from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter +from conductor.asyncio_client.adapters.utils import convert_list_to_adapter, convert_to_adapter from conductor.asyncio_client.http.api import IntegrationResourceApi -class IntegrationResourceApiAdapter(IntegrationResourceApi): +class IntegrationResourceApiAdapter: + def __init__(self, api_client: ApiClient): + self._api = IntegrationResourceApi(api_client) + + async def associate_prompt_with_integration( + self, + integration_provider: StrictStr, + integration_name: StrictStr, + prompt_name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Associate a prompt with an integration""" + await self._api.associate_prompt_with_integration( + integration_provider, + integration_name, + prompt_name, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def delete_integration_api( + self, + name: StrictStr, + integration_name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Delete an Integration""" + await self._api.delete_integration_api( + name, + integration_name, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def delete_integration_provider( + self, + name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Delete an Integration Provider""" + await self._api.delete_integration_provider( + name, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + 
_host_index=_host_index, + ) + async def get_integration_provider( self, name: StrictStr, @@ -27,7 +113,8 @@ async def get_integration_provider( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> IntegrationAdapter: - result = await super().get_integration_provider( + """Get a specific integration provider""" + result = await self._api.get_integration_provider( name, _request_timeout=_request_timeout, _request_auth=_request_auth, @@ -35,7 +122,7 @@ async def get_integration_provider( _headers=_headers, _host_index=_host_index, ) - return result + return convert_to_adapter(result, IntegrationAdapter) async def get_integration_providers( self, @@ -51,7 +138,8 @@ async def get_integration_providers( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> List[IntegrationAdapter]: - result = await super().get_integration_providers( + """Get all Integrations Providers""" + result = await self._api.get_integration_providers( category, active_only, _request_timeout=_request_timeout, @@ -60,7 +148,7 @@ async def get_integration_providers( _headers=_headers, _host_index=_host_index, ) - return result + return convert_list_to_adapter(result, IntegrationAdapter) async def get_integration_provider_defs( self, @@ -74,14 +162,15 @@ async def get_integration_provider_defs( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> List[IntegrationDefAdapter]: - result = await super().get_integration_provider_defs( + """Get integration provider definitions""" + result = await self._api.get_integration_provider_defs( _request_timeout=_request_timeout, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, _host_index=_host_index, ) - return result + return convert_list_to_adapter(result, IntegrationDefAdapter) async def get_integration_api( self, @@ -97,7 +186,8 @@ async def get_integration_api( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> IntegrationApiAdapter: - result = await super().get_integration_api( + """Get a specific integration api""" + result = await self._api.get_integration_api( name, integration_name, _request_timeout=_request_timeout, @@ -106,7 +196,7 @@ async def get_integration_api( _headers=_headers, _host_index=_host_index, ) - return result + return convert_to_adapter(result, IntegrationApiAdapter) async def get_integration_apis( self, @@ -122,7 +212,8 @@ async def get_integration_apis( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> List[IntegrationApiAdapter]: - result = await super().get_integration_apis( + """Get all integration apis""" + result = await self._api.get_integration_apis( name, active_only, _request_timeout=_request_timeout, @@ -132,7 +223,7 @@ async def get_integration_apis( _host_index=_host_index, ) - return result + return convert_list_to_adapter(result, IntegrationApiAdapter) async def get_integration_available_apis( self, @@ -147,7 +238,8 @@ async def get_integration_available_apis( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> List[str]: - result = await super().get_integration_available_apis( + """Get all integration available apis""" + result = await self._api.get_integration_available_apis( name, _request_timeout=_request_timeout, _request_auth=_request_auth, @@ -171,7 +263,8 
@@ async def save_all_integrations( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> None: - return await super().save_all_integrations( + """Save all Integrations""" + await self._api.save_all_integrations( integration, _request_timeout=_request_timeout, _request_auth=_request_auth, @@ -180,6 +273,58 @@ async def save_all_integrations( _host_index=_host_index, ) + async def save_integration_api( + self, + name: StrictStr, + integration_name: StrictStr, + integration_api_update: IntegrationApiUpdateAdapter, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Create or Update Integration""" + await self._api.save_integration_api( + name, + integration_name, + integration_api_update, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def save_integration_provider( + self, + name: StrictStr, + integration_update: IntegrationUpdateAdapter, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Create or Update Integration Provider""" + await self._api.save_integration_provider( + name, + integration_update, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + async def get_all_integrations( self, category: Optional[StrictStr] = None, @@ -194,7 +339,8 @@ async def get_all_integrations( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> List[IntegrationAdapter]: - result = await super().get_all_integrations( + """Get all integrations""" + result = await self._api.get_all_integrations( category, active_only, _request_timeout=_request_timeout, @@ -204,7 +350,7 @@ async def get_all_integrations( _host_index=_host_index, ) - return result + return convert_list_to_adapter(result, IntegrationAdapter) async def get_providers_and_integrations( self, @@ -220,7 +366,8 @@ async def get_providers_and_integrations( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> List[str]: - return await super().get_providers_and_integrations( + """Get providers and integrations""" + result = await self._api.get_providers_and_integrations( type, active_only, _request_timeout=_request_timeout, @@ -229,6 +376,7 @@ async def get_providers_and_integrations( _headers=_headers, _host_index=_host_index, ) + return result async def put_tag_for_integration( self, @@ -245,7 +393,8 @@ async def put_tag_for_integration( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> None: - return await super().put_tag_for_integration( + """Put a tag to Integration""" + await self._api.put_tag_for_integration( name, 
integration_name, tag, @@ -270,7 +419,8 @@ async def get_tags_for_integration( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> List[TagAdapter]: - result = await super().get_tags_for_integration( + """Get tags for integration""" + result = await self._api.get_tags_for_integration( name, integration_name, _request_timeout=_request_timeout, @@ -280,7 +430,7 @@ async def get_tags_for_integration( _host_index=_host_index, ) - return result + return convert_list_to_adapter(result, TagAdapter) async def delete_tag_for_integration( self, @@ -297,7 +447,8 @@ async def delete_tag_for_integration( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> None: - return await super().delete_tag_for_integration( + """Delete a tag for Integration""" + await self._api.delete_tag_for_integration( name, integration_name, tag, @@ -322,7 +473,8 @@ async def put_tag_for_integration_provider( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> None: - return await super().put_tag_for_integration_provider( + """Put a tag to Integration Provider""" + await self._api.put_tag_for_integration_provider( name, tag, _request_timeout=_request_timeout, @@ -345,7 +497,8 @@ async def get_tags_for_integration_provider( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> List[TagAdapter]: - result = await super().get_tags_for_integration_provider( + """Get tags for integration provider""" + result = await self._api.get_tags_for_integration_provider( name, _request_timeout=_request_timeout, _request_auth=_request_auth, @@ -353,7 +506,7 @@ async def get_tags_for_integration_provider( _headers=_headers, _host_index=_host_index, ) - return result + return convert_list_to_adapter(result, TagAdapter) async def delete_tag_for_integration_provider( self, @@ -369,7 +522,8 @@ async def delete_tag_for_integration_provider( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> None: - return await super().delete_tag_for_integration_provider( + """Delete a tag for Integration Provider""" + await self._api.delete_tag_for_integration_provider( name, tag, _request_timeout=_request_timeout, @@ -392,7 +546,8 @@ async def get_token_usage_for_integration_provider( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> Dict[str, str]: - return await super().get_token_usage_for_integration_provider( + """Get Token Usage by Integration Provider""" + result = await self._api.get_token_usage_for_integration_provider( name, _request_timeout=_request_timeout, _request_auth=_request_auth, @@ -400,6 +555,7 @@ async def get_token_usage_for_integration_provider( _headers=_headers, _host_index=_host_index, ) + return result async def get_prompts_with_integration( self, @@ -415,7 +571,8 @@ async def get_prompts_with_integration( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> List[MessageTemplateAdapter]: - result = await super().get_prompts_with_integration( + """Get prompts with integration""" + result = await self._api.get_prompts_with_integration( integration_provider, integration_name, _request_timeout=_request_timeout, @@ -424,7 +581,7 @@ async def get_prompts_with_integration( _headers=_headers, _host_index=_host_index, ) - return result + 
return convert_list_to_adapter(result, MessageTemplateAdapter) async def record_event_stats( self, @@ -440,7 +597,8 @@ async def record_event_stats( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> None: - return await super().record_event_stats( + """Record Event Stats""" + await self._api.record_event_stats( type, event_log, _request_timeout=_request_timeout, @@ -449,3 +607,56 @@ async def record_event_stats( _headers=_headers, _host_index=_host_index, ) + + async def register_token_usage( + self, + name: StrictStr, + integration_name: StrictStr, + body: StrictInt, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Register Token Usage""" + await self._api.register_token_usage( + name, + integration_name, + body, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def get_token_usage_for_integration( + self, + name: StrictStr, + integration_name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> int: + """Get Token Usage by Integration""" + result = await self._api.get_token_usage_for_integration( + name, + integration_name, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + return result diff --git a/src/conductor/asyncio_client/adapters/api/limits_resource_api.py b/src/conductor/asyncio_client/adapters/api/limits_resource_api.py index 44eb8e24a..6184b1d72 100644 --- a/src/conductor/asyncio_client/adapters/api/limits_resource_api.py +++ b/src/conductor/asyncio_client/adapters/api/limits_resource_api.py @@ -1,4 +1,34 @@ +from __future__ import annotations + +from typing import Annotated, Any, Dict, Optional, Tuple, Union + +from pydantic import Field, StrictFloat, StrictInt, StrictStr + +from conductor.asyncio_client.adapters import ApiClient from conductor.asyncio_client.http.api import LimitsResourceApi -class LimitsResourceApiAdapter(LimitsResourceApi): ... 
+class LimitsResourceApiAdapter: + """Adapter for LimitsResourceApi that delegates to the generated client.""" + + def __init__(self, api_client: ApiClient): + self._api = LimitsResourceApi(api_client) + + async def get1( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Dict[str, object]: + """Get limits""" + return await self._api.get1( + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) diff --git a/src/conductor/asyncio_client/adapters/api/metadata_resource_api.py b/src/conductor/asyncio_client/adapters/api/metadata_resource_api.py index e8be44510..d46dba475 100644 --- a/src/conductor/asyncio_client/adapters/api/metadata_resource_api.py +++ b/src/conductor/asyncio_client/adapters/api/metadata_resource_api.py @@ -4,15 +4,50 @@ from pydantic import Field, StrictBool, StrictFloat, StrictInt, StrictStr +from conductor.asyncio_client.adapters import ApiClient +from conductor.asyncio_client.adapters.models.extended_task_def_adapter import ( + ExtendedTaskDefAdapter, +) from conductor.asyncio_client.adapters.models.extended_workflow_def_adapter import ( ExtendedWorkflowDefAdapter, ) from conductor.asyncio_client.adapters.models.task_def_adapter import TaskDefAdapter from conductor.asyncio_client.adapters.models.workflow_def_adapter import WorkflowDefAdapter +from conductor.asyncio_client.adapters.utils import convert_list_to_adapter, convert_to_adapter from conductor.asyncio_client.http.api import MetadataResourceApi -class MetadataResourceApiAdapter(MetadataResourceApi): +class MetadataResourceApiAdapter: + """Adapter for MetadataResourceApi that converts between generated models and adapters.""" + + def __init__(self, api_client: ApiClient): + self._api = MetadataResourceApi(api_client) + + async def create( + self, + extended_workflow_def: ExtendedWorkflowDefAdapter, + overwrite: Optional[StrictBool] = None, + new_version: Optional[StrictBool] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """Create a new workflow definition""" + return await self._api.create( + extended_workflow_def, + overwrite, + new_version, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + async def get_task_def( self, tasktype: StrictStr, @@ -26,8 +61,9 @@ async def get_task_def( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)]
= 0, ) -> List[TaskDefAdapter]: - result = await super().get_task_defs( + """Get all task definitions""" + result = await self._api.get_task_defs( access, metadata, tag_key, @@ -65,7 +101,7 @@ async def get_task_defs( _headers=_headers, _host_index=_host_index, ) - return result + return convert_list_to_adapter(result, TaskDefAdapter) async def update( self, @@ -82,7 +118,8 @@ async def update( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> object: - result = await super().update( + """Create or update workflow definition(s)""" + result = await self._api.update( extended_workflow_def, overwrite, new_version, @@ -94,6 +131,50 @@ async def update( ) return result + async def update_task_def( + self, + extended_task_def: ExtendedTaskDefAdapter, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """Update a task definition""" + return await self._api.update_task_def( + extended_task_def, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def upload_workflows_and_tasks_definitions_to_s3( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Upload all workflows and tasks definitions to Object storage if configured""" + return await self._api.upload_workflows_and_tasks_definitions_to_s3( + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + async def get( self, name: StrictStr, @@ -109,17 +190,18 @@ async def get( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> WorkflowDefAdapter: - result = await super().get( - name, - version, - metadata, + """Get a workflow definition""" + result = await self._api.get( + name=name, + version=version, + metadata=metadata, _request_timeout=_request_timeout, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, _host_index=_host_index, ) - return result + return convert_to_adapter(result, WorkflowDefAdapter) async def get_workflow_defs( self, @@ -139,7 +221,8 @@ async def get_workflow_defs( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> List[WorkflowDefAdapter]: - result = await super().get_workflow_defs( + """Get the workflow definitions""" + result = await self._api.get_workflow_defs( access, metadata, tag_key, @@ -152,4 +235,75 @@ async def get_workflow_defs( _headers=_headers, _host_index=_host_index, ) - return result + return convert_list_to_adapter(result, WorkflowDefAdapter) + + async def register_task_def( + self, + extended_task_def: List[ExtendedTaskDefAdapter], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + 
Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """Register a task definition""" + return await self._api.register_task_def( + extended_task_def, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def unregister_task_def( + self, + tasktype: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Unregister a task definition""" + return await self._api.unregister_task_def( + tasktype, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def unregister_workflow_def( + self, + name: StrictStr, + version: StrictInt, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Unregister a workflow definition""" + return await self._api.unregister_workflow_def( + name, + version, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) diff --git a/src/conductor/asyncio_client/adapters/api/metrics_resource_api.py b/src/conductor/asyncio_client/adapters/api/metrics_resource_api.py index 4dad395e6..66a120c04 100644 --- a/src/conductor/asyncio_client/adapters/api/metrics_resource_api.py +++ b/src/conductor/asyncio_client/adapters/api/metrics_resource_api.py @@ -1,4 +1,42 @@ +from __future__ import annotations + +from typing import Annotated, Any, Dict, Optional, Tuple, Union + +from pydantic import Field, StrictFloat, StrictInt, StrictStr + +from conductor.asyncio_client.adapters import ApiClient from conductor.asyncio_client.http.api import MetricsResourceApi -class MetricsResourceApiAdapter(MetricsResourceApi): ... 
+class MetricsResourceApiAdapter: + def __init__(self, api_client: ApiClient): + self._api = MetricsResourceApi(api_client) + + async def prometheus_task_metrics( + self, + task_name: StrictStr, + start: StrictStr, + end: StrictStr, + step: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Dict[str, object]: + """Get prometheus task metrics""" + return await self._api.prometheus_task_metrics( + task_name=task_name, + start=start, + end=end, + step=step, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) diff --git a/src/conductor/asyncio_client/adapters/api/metrics_token_resource_api.py b/src/conductor/asyncio_client/adapters/api/metrics_token_resource_api.py index 49203a862..867fcec45 100644 --- a/src/conductor/asyncio_client/adapters/api/metrics_token_resource_api.py +++ b/src/conductor/asyncio_client/adapters/api/metrics_token_resource_api.py @@ -1,4 +1,37 @@ +from __future__ import annotations + +from typing import Annotated, Any, Dict, Optional, Tuple, Union + +from pydantic import Field, StrictFloat, StrictInt, StrictStr + +from conductor.asyncio_client.adapters import ApiClient +from conductor.asyncio_client.adapters.models.metrics_token_adapter import MetricsTokenAdapter +from conductor.asyncio_client.adapters.utils import convert_to_adapter from conductor.asyncio_client.http.api import MetricsTokenResourceApi -class MetricsTokenResourceApiAdapter(MetricsTokenResourceApi): ... 
+class MetricsTokenResourceApiAdapter: + def __init__(self, api_client: ApiClient): + self._api = MetricsTokenResourceApi(api_client) + + async def token( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> MetricsTokenAdapter: + """Get metrics token""" + result = await self._api.token( + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + return convert_to_adapter(result, MetricsTokenAdapter) diff --git a/src/conductor/asyncio_client/adapters/api/prompt_resource_api.py b/src/conductor/asyncio_client/adapters/api/prompt_resource_api.py index 14bb9eb95..88ec08278 100644 --- a/src/conductor/asyncio_client/adapters/api/prompt_resource_api.py +++ b/src/conductor/asyncio_client/adapters/api/prompt_resource_api.py @@ -4,14 +4,22 @@ from pydantic import Field, StrictFloat, StrictInt, StrictStr +from conductor.asyncio_client.adapters import ApiClient from conductor.asyncio_client.adapters.models.message_template_adapter import ( MessageTemplateAdapter, ) +from conductor.asyncio_client.adapters.models.prompt_template_test_request_adapter import ( + PromptTemplateTestRequestAdapter, +) from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter +from conductor.asyncio_client.adapters.utils import convert_list_to_adapter, convert_to_adapter from conductor.asyncio_client.http.api import PromptResourceApi -class PromptResourceApiAdapter(PromptResourceApi): +class PromptResourceApiAdapter: + def __init__(self, api_client: ApiClient): + self._api = PromptResourceApi(api_client) + async def get_message_template( self, name: StrictStr, @@ -25,7 +33,8 @@ async def get_message_template( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> MessageTemplateAdapter: - result = await super().get_message_template( + """Get a message template by name""" + result = await self._api.get_message_template( name, _request_timeout=_request_timeout, _request_auth=_request_auth, @@ -33,7 +42,7 @@ async def get_message_template( _headers=_headers, _host_index=_host_index, ) - return result + return convert_to_adapter(result, MessageTemplateAdapter) async def get_message_templates( self, @@ -47,14 +56,15 @@ async def get_message_templates( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> List[MessageTemplateAdapter]: - result = await super().get_message_templates( + """Get all message templates""" + result = await self._api.get_message_templates( _request_timeout=_request_timeout, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, _host_index=_host_index, ) - return result + return convert_list_to_adapter(result, MessageTemplateAdapter) async def create_message_templates( self, @@ -69,7 +79,8 @@ async def create_message_templates( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> None: - return await super().create_message_templates( + """Create multiple message templates in bulk""" + return await self._api.create_message_templates( message_template, 
_request_timeout=_request_timeout, _request_auth=_request_auth, @@ -92,7 +103,8 @@ async def put_tag_for_prompt_template( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> None: - return await super().put_tag_for_prompt_template( + """Put a tag to Prompt Template""" + await self._api.put_tag_for_prompt_template( name, tag, _request_timeout=_request_timeout, @@ -115,7 +127,8 @@ async def get_tags_for_prompt_template( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> List[TagAdapter]: - result = await super().get_tags_for_prompt_template( + """Get tags for a prompt template""" + result = await self._api.get_tags_for_prompt_template( name, _request_timeout=_request_timeout, _request_auth=_request_auth, @@ -123,7 +136,7 @@ async def get_tags_for_prompt_template( _headers=_headers, _host_index=_host_index, ) - return result + return convert_list_to_adapter(result, TagAdapter) async def delete_tag_for_prompt_template( self, @@ -139,9 +152,86 @@ async def delete_tag_for_prompt_template( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> None: - return await super().delete_tag_for_prompt_template( + """Delete a tag from a prompt template""" + return await self._api.delete_tag_for_prompt_template( name, tag, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def delete_message_template( + self, + name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Delete a message template""" + await self._api.delete_message_template( + name, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def save_message_template( + self, + name: StrictStr, + description: StrictStr, + body: StrictStr, + models: Optional[List[StrictStr]] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Create or Update a template""" + await self._api.save_message_template( + name, + description, + body, + models, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def test_message_template( + self, + prompt_template_test_request: PromptTemplateTestRequestAdapter, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] 
= 0, + ) -> str: + """Test Prompt Template""" + return await self._api.test_message_template( + prompt_template_test_request, + _request_timeout=_request_timeout, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, diff --git a/src/conductor/asyncio_client/adapters/api/queue_admin_resource_api.py b/src/conductor/asyncio_client/adapters/api/queue_admin_resource_api.py index 9b04cc6e7..0fbd6e31e 100644 --- a/src/conductor/asyncio_client/adapters/api/queue_admin_resource_api.py +++ b/src/conductor/asyncio_client/adapters/api/queue_admin_resource_api.py @@ -1,4 +1,55 @@ +from __future__ import annotations + +from typing import Annotated, Any, Dict, Optional, Tuple, Union + +from pydantic import Field, StrictFloat, StrictInt, StrictStr + +from conductor.asyncio_client.adapters import ApiClient from conductor.asyncio_client.http.api import QueueAdminResourceApi -class QueueAdminResourceApiAdapter(QueueAdminResourceApi): ... +class QueueAdminResourceApiAdapter: + def __init__(self, api_client: ApiClient): + self._api = QueueAdminResourceApi(api_client) + + async def names( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Dict[str, str]: + """Get queue names""" + return await self._api.names( + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def size1( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Dict[str, Dict[str, int]]: + """Get queue size""" + return await self._api.size1( + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) diff --git a/src/conductor/asyncio_client/adapters/api/scheduler_resource_api.py b/src/conductor/asyncio_client/adapters/api/scheduler_resource_api.py index f12a351fc..9b200c162 100644 --- a/src/conductor/asyncio_client/adapters/api/scheduler_resource_api.py +++ b/src/conductor/asyncio_client/adapters/api/scheduler_resource_api.py @@ -4,6 +4,7 @@ from pydantic import Field, StrictFloat, StrictInt, StrictStr +from conductor.asyncio_client.adapters import ApiClient from conductor.asyncio_client.adapters.models.search_result_workflow_schedule_execution_model_adapter import ( SearchResultWorkflowScheduleExecutionModelAdapter, ) @@ -14,10 +15,37 @@ from conductor.asyncio_client.adapters.models.workflow_schedule_model_adapter import ( WorkflowScheduleModelAdapter, ) +from conductor.asyncio_client.adapters.utils import convert_list_to_adapter, convert_to_adapter from conductor.asyncio_client.http.api import SchedulerResourceApi -class SchedulerResourceApiAdapter(SchedulerResourceApi): +class SchedulerResourceApiAdapter: + def __init__(self, api_client: ApiClient): + self._api = SchedulerResourceApi(api_client) + + async def delete_schedule( + self, + name: 
StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """Delete a workflow schedule by name""" + return await self._api.delete_schedule( + name, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + async def get_schedule( self, name: StrictStr, @@ -31,7 +59,8 @@ async def get_schedule( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> WorkflowScheduleAdapter: - result = await super().get_schedule( + """Get a workflow schedule by name""" + result = await self._api.get_schedule( name, _request_timeout=_request_timeout, _request_auth=_request_auth, @@ -39,7 +68,7 @@ async def get_schedule( _headers=_headers, _host_index=_host_index, ) - return result + return convert_to_adapter(result, WorkflowScheduleAdapter) async def get_all_schedules( self, @@ -54,7 +83,8 @@ async def get_all_schedules( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> List[WorkflowScheduleModelAdapter]: - result = await super().get_all_schedules( + """Get all workflow schedules, optionally filtered by workflow name""" + result = await self._api.get_all_schedules( workflow_name, _request_timeout=_request_timeout, _request_auth=_request_auth, @@ -62,7 +92,7 @@ async def get_all_schedules( _headers=_headers, _host_index=_host_index, ) - return result + return convert_list_to_adapter(result, WorkflowScheduleModelAdapter) async def search_v2( self, @@ -81,7 +111,8 @@ async def search_v2( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> SearchResultWorkflowScheduleExecutionModelAdapter: - result = await super().search_v2( + """Search for workflow schedule executions""" + result = await self._api.search_v2( start, size, sort, @@ -93,7 +124,7 @@ async def search_v2( _headers=_headers, _host_index=_host_index, ) - return result + return convert_to_adapter(result, SearchResultWorkflowScheduleExecutionModelAdapter) async def get_schedules_by_tag( self, @@ -108,7 +139,8 @@ async def get_schedules_by_tag( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> List[WorkflowScheduleModelAdapter]: - result = await super().get_schedules_by_tag( + """Get schedules by tag""" + result = await self._api.get_schedules_by_tag( tag, _request_timeout=_request_timeout, _request_auth=_request_auth, @@ -116,7 +148,7 @@ async def get_schedules_by_tag( _headers=_headers, _host_index=_host_index, ) - return result + return convert_list_to_adapter(result, WorkflowScheduleModelAdapter) async def put_tag_for_schedule( self, @@ -132,7 +164,8 @@ async def put_tag_for_schedule( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> None: - return await super().put_tag_for_schedule( + """Put a tag to schedule""" + await self._api.put_tag_for_schedule( name, tag, _request_timeout=_request_timeout, @@ -155,7 +188,8 @@ async def get_tags_for_schedule( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: 
Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> List[TagAdapter]: - result = await super().get_tags_for_schedule( + """Get tags for a schedule""" + result = await self._api.get_tags_for_schedule( name, _request_timeout=_request_timeout, _request_auth=_request_auth, @@ -163,7 +197,7 @@ async def get_tags_for_schedule( _headers=_headers, _host_index=_host_index, ) - return result + return convert_list_to_adapter(result, TagAdapter) async def delete_tag_for_schedule( self, @@ -179,7 +213,8 @@ async def delete_tag_for_schedule( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> None: - return await super().delete_tag_for_schedule( + """Delete a tag from a schedule""" + return await self._api.delete_tag_for_schedule( name, tag, _request_timeout=_request_timeout, @@ -188,3 +223,164 @@ async def delete_tag_for_schedule( _headers=_headers, _host_index=_host_index, ) + + async def get_next_few_schedules( + self, + cron_expression: StrictStr, + schedule_start_time: Optional[StrictInt] = None, + schedule_end_time: Optional[StrictInt] = None, + limit: Optional[StrictInt] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> List[int]: + """Get the next few schedules for a cron expression""" + return await self._api.get_next_few_schedules( + cron_expression, + schedule_start_time, + schedule_end_time, + limit, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def pause_all_schedules( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Dict[str, object]: + """Pause all scheduling in a single conductor server instance (for debugging only)""" + return await self._api.pause_all_schedules( + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def pause_schedule( + self, + name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """Pauses an existing schedule by name""" + return await self._api.pause_schedule( + name, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def requeue_all_execution_records( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: 
Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Dict[str, object]: + """Requeue all execution records""" + return await self._api.requeue_all_execution_records( + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def resume_all_schedules( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Dict[str, object]: + """Resume all scheduling in a single conductor server instance (for debugging only)""" + return await self._api.resume_all_schedules( + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def resume_schedule( + self, + name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """Resume a paused schedule by name""" + return await self._api.resume_schedule( + name, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def save_schedule( + self, + save_schedule_request: WorkflowScheduleModelAdapter, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """Save a schedule""" + return await self._api.save_schedule( + save_schedule_request, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) diff --git a/src/conductor/asyncio_client/adapters/api/schema_resource_api.py b/src/conductor/asyncio_client/adapters/api/schema_resource_api.py index 54642b57d..fd9781edd 100644 --- a/src/conductor/asyncio_client/adapters/api/schema_resource_api.py +++ b/src/conductor/asyncio_client/adapters/api/schema_resource_api.py @@ -4,11 +4,64 @@ from pydantic import Field, StrictBool, StrictFloat, StrictInt, StrictStr +from conductor.asyncio_client.adapters import ApiClient from conductor.asyncio_client.adapters.models.schema_def_adapter import SchemaDefAdapter +from conductor.asyncio_client.adapters.utils import convert_list_to_adapter, convert_to_adapter from conductor.asyncio_client.http.api import SchemaResourceApi -class SchemaResourceApiAdapter(SchemaResourceApi): +class SchemaResourceApiAdapter: + def __init__(self, api_client: ApiClient): + self._api = SchemaResourceApi(api_client) + + async def 
delete_schema_by_name( + self, + name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Delete all versions of schema by name""" + return await self._api.delete_schema_by_name( + name, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def delete_schema_by_name_and_version( + self, + name: StrictStr, + version: StrictInt, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Delete a version of schema by name and version""" + return await self._api.delete_schema_by_name_and_version( + name, + version, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + async def save( self, schema_def: List[SchemaDefAdapter], @@ -23,7 +76,8 @@ async def save( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> None: - return await super().save( + """Save a schema""" + await self._api.save( schema_def, new_version, _request_timeout=_request_timeout, @@ -47,7 +101,8 @@ async def get_schema_by_name_and_version( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> SchemaDefAdapter: - result = await super().get_schema_by_name_and_version( + """Get a schema by name and version""" + result = await self._api.get_schema_by_name_and_version( name, version, _request_timeout=_request_timeout, @@ -56,7 +111,7 @@ async def get_schema_by_name_and_version( _headers=_headers, _host_index=_host_index, ) - return result + return convert_to_adapter(result, SchemaDefAdapter) async def get_all_schemas( self, @@ -70,11 +125,12 @@ async def get_all_schemas( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> List[SchemaDefAdapter]: - result = await super().get_all_schemas( + """Get all schemas""" + result = await self._api.get_all_schemas( _request_timeout=_request_timeout, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, _host_index=_host_index, ) - return result + return convert_list_to_adapter(result, SchemaDefAdapter) diff --git a/src/conductor/asyncio_client/adapters/api/secret_resource_api.py b/src/conductor/asyncio_client/adapters/api/secret_resource_api.py index dc23eb903..9228d8600 100644 --- a/src/conductor/asyncio_client/adapters/api/secret_resource_api.py +++ b/src/conductor/asyncio_client/adapters/api/secret_resource_api.py @@ -4,12 +4,82 @@ from pydantic import Field, StrictFloat, StrictInt, StrictStr +from conductor.asyncio_client.adapters import ApiClient from conductor.asyncio_client.adapters.models.extended_secret_adapter import ExtendedSecretAdapter from conductor.asyncio_client.adapters.models.tag_adapter 
import TagAdapter +from conductor.asyncio_client.adapters.utils import convert_list_to_adapter from conductor.asyncio_client.http.api import SecretResourceApi -class SecretResourceApiAdapter(SecretResourceApi): +class SecretResourceApiAdapter: + def __init__(self, api_client: ApiClient): + self._api = SecretResourceApi(api_client) + + async def clear_local_cache( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Dict[str, str]: + """Clear local cache""" + return await self._api.clear_local_cache( + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def clear_redis_cache( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Dict[str, str]: + """Clear redis cache""" + return await self._api.clear_redis_cache( + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def delete_secret( + self, + key: Annotated[str, Field(strict=True)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """Delete a secret value by key""" + return await self._api.delete_secret( + key, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + async def secret_exists( self, key: Annotated[str, Field(strict=True)], @@ -22,8 +92,9 @@ async def secret_exists( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> bool: - result = await super().secret_exists( + ) -> object: + """Check if a secret exists by key""" + return await self._api.secret_exists( key, _request_timeout=_request_timeout, _request_auth=_request_auth, @@ -31,7 +102,6 @@ async def secret_exists( _headers=_headers, _host_index=_host_index, ) - return result async def list_all_secret_names( self, @@ -45,7 +115,8 @@ async def list_all_secret_names( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> List[str]: - return await super().list_all_secret_names( + """List all secret names""" + return await self._api.list_all_secret_names( _request_timeout=_request_timeout, _request_auth=_request_auth, _content_type=_content_type, @@ -65,14 +136,40 @@ async def list_secrets_with_tags_that_user_can_grant_access_to( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: 
Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> List[ExtendedSecretAdapter]: - result = await super().list_secrets_with_tags_that_user_can_grant_access_to( + """List secrets with tags that the current user can grant access to""" + result = await self._api.list_secrets_with_tags_that_user_can_grant_access_to( + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + return convert_list_to_adapter(result, ExtendedSecretAdapter) + + async def put_secret( + self, + key: Annotated[str, Field(strict=True)], + body: Annotated[str, Field(min_length=0, strict=True, max_length=65535)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """Put a secret value by key""" + return await self._api.put_secret( + key, + body, _request_timeout=_request_timeout, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, _host_index=_host_index, ) - return result async def put_tag_for_secret( self, @@ -88,7 +185,8 @@ async def put_tag_for_secret( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> None: - return await super().put_tag_for_secret( + """Put a tag for a secret""" + await self._api.put_tag_for_secret( key, tag, _request_timeout=_request_timeout, @@ -111,7 +209,8 @@ async def get_tags( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> List[TagAdapter]: - result = await super().get_tags( + """Get tags for a secret""" + result = await self._api.get_tags( key, _request_timeout=_request_timeout, _request_auth=_request_auth, @@ -119,7 +218,7 @@ async def get_tags( _headers=_headers, _host_index=_host_index, ) - return result + return convert_list_to_adapter(result, TagAdapter) async def delete_tag_for_secret( self, @@ -135,7 +234,8 @@ async def delete_tag_for_secret( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> None: - return await super().delete_tag_for_secret( + """Delete a tag for a secret""" + return await self._api.delete_tag_for_secret( key, tag, _request_timeout=_request_timeout, @@ -144,3 +244,48 @@ async def delete_tag_for_secret( _headers=_headers, _host_index=_host_index, ) + + async def get_secret( + self, + key: Annotated[str, Field(strict=True)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Get a secret value by key""" + result = await self._api.get_secret( + key, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + return result + + async def list_secrets_that_user_can_grant_access_to( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, 
Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> List[str]: + """List all secret names user can grant access to""" + return await self._api.list_secrets_that_user_can_grant_access_to( + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) diff --git a/src/conductor/asyncio_client/adapters/api/tags_api.py b/src/conductor/asyncio_client/adapters/api/tags_api.py index 90d9c05b3..a3f0ac022 100644 --- a/src/conductor/asyncio_client/adapters/api/tags_api.py +++ b/src/conductor/asyncio_client/adapters/api/tags_api.py @@ -4,11 +4,138 @@ from pydantic import Field, StrictFloat, StrictInt, StrictStr +from conductor.asyncio_client.adapters import ApiClient from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter +from conductor.asyncio_client.adapters.utils import convert_list_to_adapter from conductor.asyncio_client.http.api import TagsApi -class TagsApiAdapter(TagsApi): +class TagsApiAdapter: + def __init__(self, api_client: ApiClient): + self._api = TagsApi(api_client) + + async def add_task_tag( + self, + task_name: StrictStr, + tag: TagAdapter, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """Add a tag to a task""" + return await self._api.add_task_tag( + task_name, + tag, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def add_workflow_tag( + self, + name: StrictStr, + tag: TagAdapter, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """Add a tag to a workflow""" + return await self._api.add_workflow_tag( + name, + tag, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def delete_task_tag( + self, + task_name: StrictStr, + tag: TagAdapter, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """Delete a tag from a task""" + return await self._api.delete_task_tag( + task_name, + tag, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def delete_workflow_tag( + self, + name: StrictStr, + tag: 
TagAdapter, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """Delete a tag from a workflow""" + return await self._api.delete_workflow_tag( + name, + tag, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def get_tags1( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> List[TagAdapter]: + """List all tags""" + result = await self._api.get_tags1( + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + return convert_list_to_adapter(result, TagAdapter) + async def get_workflow_tags( self, name: StrictStr, @@ -22,7 +149,8 @@ async def get_workflow_tags( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> List[TagAdapter]: - result = await super().get_workflow_tags( + """Get the tags for a workflow""" + result = await self._api.get_workflow_tags( name, _request_timeout=_request_timeout, _request_auth=_request_auth, @@ -30,7 +158,7 @@ async def get_workflow_tags( _headers=_headers, _host_index=_host_index, ) - return result + return convert_list_to_adapter(result, TagAdapter) async def set_workflow_tags( self, @@ -46,7 +174,8 @@ async def set_workflow_tags( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> object: - result = await super().set_workflow_tags( + """Set the tags for a workflow""" + return await self._api.set_workflow_tags( name, tag, _request_timeout=_request_timeout, @@ -55,7 +184,6 @@ async def set_workflow_tags( _headers=_headers, _host_index=_host_index, ) - return result async def get_task_tags( self, @@ -70,7 +198,8 @@ async def get_task_tags( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> List[TagAdapter]: - result = await super().get_task_tags( + """Get the tags for a task""" + result = await self._api.get_task_tags( task_name, _request_timeout=_request_timeout, _request_auth=_request_auth, @@ -78,7 +207,7 @@ async def get_task_tags( _headers=_headers, _host_index=_host_index, ) - return result + return convert_list_to_adapter(result, TagAdapter) async def set_task_tags( self, @@ -94,7 +223,8 @@ async def set_task_tags( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> object: - result = await super().set_task_tags( + """Set the tags for a task""" + return await self._api.set_task_tags( task_name, tag, _request_timeout=_request_timeout, @@ -103,4 +233,3 @@ async def set_task_tags( _headers=_headers, _host_index=_host_index, ) - return result diff --git a/src/conductor/asyncio_client/adapters/api/task_resource_api.py 
b/src/conductor/asyncio_client/adapters/api/task_resource_api.py index 52cc2c36b..4ba1c574f 100644 --- a/src/conductor/asyncio_client/adapters/api/task_resource_api.py +++ b/src/conductor/asyncio_client/adapters/api/task_resource_api.py @@ -2,19 +2,67 @@ from typing import Annotated, Any, Dict, List, Optional, Tuple, Union -from pydantic import Field, StrictFloat, StrictInt, StrictStr, validate_call +from pydantic import Field, StrictFloat, StrictInt, StrictStr +from conductor.asyncio_client.adapters import ApiClient from conductor.asyncio_client.adapters.models.poll_data_adapter import PollDataAdapter from conductor.asyncio_client.adapters.models.search_result_task_summary_adapter import ( SearchResultTaskSummaryAdapter, ) from conductor.asyncio_client.adapters.models.task_adapter import TaskAdapter from conductor.asyncio_client.adapters.models.task_exec_log_adapter import TaskExecLogAdapter +from conductor.asyncio_client.adapters.models.task_result_adapter import TaskResultAdapter from conductor.asyncio_client.adapters.models.workflow_adapter import WorkflowAdapter +from conductor.asyncio_client.adapters.utils import convert_list_to_adapter, convert_to_adapter from conductor.asyncio_client.http.api import TaskResourceApi -class TaskResourceApiAdapter(TaskResourceApi): +class TaskResourceApiAdapter: + def __init__(self, api_client: ApiClient): + self._api = TaskResourceApi(api_client) + + async def all( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Dict[str, int]: + """Get the details about each queue""" + return await self._api.all( + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def all_verbose( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Dict[str, Dict[str, Dict[str, int]]]: + """Get the details about each queue""" + return await self._api.all_verbose( + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + async def poll( self, tasktype: StrictStr, @@ -30,7 +78,8 @@ async def poll( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> Optional[TaskAdapter]: - result = await super().poll( + """Poll for a task""" + result = await self._api.poll( tasktype, workerid, domain, @@ -40,7 +89,30 @@ async def poll( _headers=_headers, _host_index=_host_index, ) - return result + return convert_to_adapter(result, TaskAdapter) if result else None + + async def requeue_pending_task( + self, + task_type: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + 
_content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Requeue pending tasks""" + return await self._api.requeue_pending_task( + task_type, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) async def batch_poll( self, @@ -59,7 +131,8 @@ async def batch_poll( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> List[TaskAdapter]: - result = await super().batch_poll( + """Batch poll for tasks""" + result = await self._api.batch_poll( tasktype, workerid, domain, @@ -71,7 +144,40 @@ async def batch_poll( _headers=_headers, _host_index=_host_index, ) - return result + return convert_list_to_adapter(result, TaskAdapter) + + async def get_all_poll_data( + self, + worker_size: Optional[StrictInt] = None, + worker_opt: Optional[StrictStr] = None, + queue_size: Optional[StrictInt] = None, + queue_opt: Optional[StrictStr] = None, + last_poll_time_size: Optional[StrictInt] = None, + last_poll_time_opt: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Dict[str, object]: + """Get the last poll data for all task types""" + return await self._api.get_all_poll_data( + worker_size, + worker_opt, + queue_size, + queue_opt, + last_poll_time_size, + last_poll_time_opt, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) async def get_task( self, @@ -86,7 +192,8 @@ async def get_task( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> TaskAdapter: - result = await super().get_task( + """Get a task""" + result = await self._api.get_task( task_id, _request_timeout=_request_timeout, _request_auth=_request_auth, @@ -94,7 +201,7 @@ async def get_task( _headers=_headers, _host_index=_host_index, ) - return result + return convert_to_adapter(result, TaskAdapter) async def get_poll_data( self, @@ -109,7 +216,8 @@ async def get_poll_data( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> List[PollDataAdapter]: - result = await super().get_poll_data( + """Get the poll data""" + result = await self._api.get_poll_data( task_type, _request_timeout=_request_timeout, _request_auth=_request_auth, @@ -117,7 +225,7 @@ async def get_poll_data( _headers=_headers, _host_index=_host_index, ) - return result + return convert_list_to_adapter(result, PollDataAdapter) async def get_task_logs( self, @@ -132,15 +240,41 @@ async def get_task_logs( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> List[TaskExecLogAdapter]: - result = await super().get_task_logs( + """Get the task logs""" + result = await self._api.get_task_logs( + task_id, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + return convert_list_to_adapter(result, 
TaskExecLogAdapter) + + async def log( + self, + task_id: StrictStr, + body: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Log Task Execution Details""" + await self._api.log( task_id, + body, _request_timeout=_request_timeout, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, _host_index=_host_index, ) - return result async def search1( self, @@ -159,7 +293,8 @@ async def search1( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> SearchResultTaskSummaryAdapter: - result = await super().search1( + """Search for tasks""" + result = await self._api.search1( start, size, sort, @@ -171,9 +306,85 @@ async def search1( _headers=_headers, _host_index=_host_index, ) - return result + return convert_to_adapter(result, SearchResultTaskSummaryAdapter) + + async def size( + self, + task_type: Optional[List[StrictStr]] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Dict[str, int]: + """Get Task type queue sizes""" + return await self._api.size( + task_type, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def update_task( + self, + task_result: TaskResultAdapter, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Update a task""" + return await self._api.update_task( + task_result, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def update_task1( + self, + workflow_id: StrictStr, + task_ref_name: StrictStr, + status: StrictStr, + request_body: Dict[str, Dict[str, Any]], + workerid: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Update a task By Ref Name""" + return await self._api.update_task1( + workflow_id, + task_ref_name, + status, + request_body, + workerid, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) - @validate_call async def update_task_sync( self, workflow_id: 
StrictStr, @@ -191,59 +402,17 @@ async def update_task_sync( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> WorkflowAdapter: - """Update a task By Ref Name synchronously - - - :param workflow_id: (required) - :type workflow_id: str - :param task_ref_name: (required) - :type task_ref_name: str - :param status: (required) - :type status: str - :param request_body: (required) - :type request_body: Dict[str, object] - :param workerid: - :type workerid: str - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ - - _param = self._update_task_sync_serialize( - workflow_id=workflow_id, - task_ref_name=task_ref_name, - status=status, - request_body=request_body, - workerid=workerid, + """Update a task By Ref Name synchronously""" + result = await self._api.update_task_sync( + workflow_id, + task_ref_name, + status, + request_body, + workerid, + _request_timeout=_request_timeout, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, _host_index=_host_index, ) - - _response_types_map: Dict[str, Optional[str]] = { - "200": "Workflow", - } - response_data = await self.api_client.call_api(*_param, _request_timeout=_request_timeout) - await response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data + return convert_to_adapter(result, WorkflowAdapter) diff --git a/src/conductor/asyncio_client/adapters/api/token_resource_api.py b/src/conductor/asyncio_client/adapters/api/token_resource_api.py index 52f40be20..74173e8da 100644 --- a/src/conductor/asyncio_client/adapters/api/token_resource_api.py +++ b/src/conductor/asyncio_client/adapters/api/token_resource_api.py @@ -1,4 +1,62 @@ +from __future__ import annotations + +from typing import Annotated, Any, Dict, Optional, Tuple, Union + +from pydantic import Field, StrictBool, StrictFloat, StrictInt, StrictStr + +from conductor.asyncio_client.adapters import ApiClient +from conductor.asyncio_client.adapters.models.generate_token_request_adapter import ( + GenerateTokenRequestAdapter, +) from conductor.asyncio_client.http.api import TokenResourceApi -class TokenResourceApiAdapter(TokenResourceApi): ... 
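Editor's note on the pattern used throughout the task and secret adapters above: every typed response is now routed through `convert_to_adapter` / `convert_list_to_adapter` from `conductor.asyncio_client.adapters.utils`, whose bodies are not part of this diff. The sketch below is only a plausible shape for those helpers, assuming the adapter classes are pydantic v2 models that expose `model_validate` and `model_dump`; it is not the implementation shipped in the repository.

```python
# Illustrative sketch only -- not the code in conductor.asyncio_client.adapters.utils.
# Assumes pydantic v2 models on both the generated and adapter sides.
from typing import Dict, List, Optional, Type, TypeVar

from pydantic import BaseModel

T = TypeVar("T", bound=BaseModel)


def convert_to_adapter(obj: BaseModel, adapter_cls: Type[T]) -> T:
    # Re-validate a generated model instance as its adapter counterpart.
    return adapter_cls.model_validate(obj.model_dump(by_alias=True))


def convert_list_to_adapter(items: Optional[List[BaseModel]], adapter_cls: Type[T]) -> List[T]:
    # Convert each element; a missing or empty list stays empty.
    return [convert_to_adapter(item, adapter_cls) for item in items or []]


def convert_dict_to_adapter(
    mapping: Optional[Dict[str, List[BaseModel]]], adapter_cls: Type[T]
) -> Dict[str, List[T]]:
    # Convert dict-of-lists responses such as get_workflows().
    return {key: convert_list_to_adapter(values, adapter_cls) for key, values in (mapping or {}).items()}
```

Note how this matches the call sites above: `poll` guards against `None` itself (`convert_to_adapter(result, TaskAdapter) if result else None`), so the single-object helper does not need its own `None` handling.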
+class TokenResourceApiAdapter: + def __init__(self, api_client: ApiClient): + self._api = TokenResourceApi(api_client) + + async def generate_token( + self, + generate_token_request: GenerateTokenRequestAdapter, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """Generate JWT with the given access key""" + return await self._api.generate_token( + generate_token_request, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def get_user_info( + self, + claims: Optional[StrictBool] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """Get the user info from the token""" + return await self._api.get_user_info( + claims, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) diff --git a/src/conductor/asyncio_client/adapters/api/user_resource_api.py b/src/conductor/asyncio_client/adapters/api/user_resource_api.py index ae25ccb96..63f8308bd 100644 --- a/src/conductor/asyncio_client/adapters/api/user_resource_api.py +++ b/src/conductor/asyncio_client/adapters/api/user_resource_api.py @@ -4,12 +4,44 @@ from pydantic import Field, StrictBool, StrictFloat, StrictInt, StrictStr -from conductor.asyncio_client.adapters.models import UpsertUserRequestAdapter as UpsertUserRequest +from conductor.asyncio_client.adapters import ApiClient +from conductor.asyncio_client.adapters.models import UpsertUserRequestAdapter from conductor.asyncio_client.adapters.models.conductor_user_adapter import ConductorUserAdapter +from conductor.asyncio_client.adapters.utils import convert_list_to_adapter from conductor.asyncio_client.http.api import UserResourceApi -class UserResourceApiAdapter(UserResourceApi): +class UserResourceApiAdapter: + def __init__(self, api_client: ApiClient): + self._api = UserResourceApi(api_client) + + async def check_permissions( + self, + user_id: StrictStr, + type: StrictStr, + id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """Get the permissions this user has over workflows and tasks""" + return await self._api.check_permissions( + user_id, + type, + id, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + async def get_granted_permissions( self, user_id: StrictStr, @@ -23,10 +55,11 @@ async def get_granted_permissions( _headers: 
Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> object: + """Get the granted permissions for a user""" # Convert empty user_id to None to prevent sending invalid data to server if not user_id: user_id = None - return await super().get_granted_permissions( + return await self._api.get_granted_permissions( user_id, _request_timeout=_request_timeout, _request_auth=_request_auth, @@ -47,11 +80,12 @@ async def get_user( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ConductorUserAdapter: + ) -> object: + """Get a user""" # Convert empty user id to None to prevent sending invalid data to server if not id: id = None - result = await super().get_user( + return await self._api.get_user( id, _request_timeout=_request_timeout, _request_auth=_request_auth, @@ -59,12 +93,11 @@ async def get_user( _headers=_headers, _host_index=_host_index, ) - return result async def upsert_user( self, id: StrictStr, - upsert_user_request: UpsertUserRequest, + upsert_user_request: UpsertUserRequestAdapter, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -74,11 +107,12 @@ async def upsert_user( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ConductorUserAdapter: + ) -> object: + """Upsert a user""" # Convert empty user id to None to prevent sending invalid data to server if not id: id = None - result = await super().upsert_user( + return await self._api.upsert_user( id, upsert_user_request, _request_timeout=_request_timeout, @@ -87,7 +121,6 @@ async def upsert_user( _headers=_headers, _host_index=_host_index, ) - return result async def list_users( self, @@ -102,7 +135,8 @@ async def list_users( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> List[ConductorUserAdapter]: - result = await super().list_users( + """List users""" + result = await self._api.list_users( apps, _request_timeout=_request_timeout, _request_auth=_request_auth, @@ -110,7 +144,7 @@ async def list_users( _headers=_headers, _host_index=_host_index, ) - return result + return convert_list_to_adapter(result, ConductorUserAdapter) async def delete_user( self, @@ -125,10 +159,11 @@ async def delete_user( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> object: + """Delete a user""" # Convert empty user id to None to prevent sending invalid data to server if not id: id = None - return await super().delete_user( + return await self._api.delete_user( id, _request_timeout=_request_timeout, _request_auth=_request_auth, diff --git a/src/conductor/asyncio_client/adapters/api/version_resource_api.py b/src/conductor/asyncio_client/adapters/api/version_resource_api.py index e5a49c7a1..f56b673b8 100644 --- a/src/conductor/asyncio_client/adapters/api/version_resource_api.py +++ b/src/conductor/asyncio_client/adapters/api/version_resource_api.py @@ -1,4 +1,34 @@ +from __future__ import annotations + +from typing import Annotated, Any, Dict, Optional, Tuple, Union + +from pydantic import Field, StrictFloat, StrictInt, StrictStr + +from conductor.asyncio_client.adapters import ApiClient from conductor.asyncio_client.http.api import VersionResourceApi -class VersionResourceApiAdapter(VersionResourceApi): ... 
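Because the adapters rewritten above now wrap the generated API classes instead of subclassing them, call sites keep the same construction pattern (pass an `ApiClient` into the adapter), but `isinstance` checks against the generated classes would no longer hold. A minimal usage sketch follows; the `ApiClient` is assumed to be configured elsewhere, and the helper function names here are purely illustrative.

```python
from typing import List

from conductor.asyncio_client.adapters import ApiClient
from conductor.asyncio_client.adapters.api.secret_resource_api import SecretResourceApiAdapter
from conductor.asyncio_client.adapters.api.user_resource_api import UserResourceApiAdapter
from conductor.asyncio_client.adapters.models.conductor_user_adapter import ConductorUserAdapter


async def rotate_secret(api_client: ApiClient, key: str, value: str) -> None:
    secrets = SecretResourceApiAdapter(api_client)
    await secrets.put_secret(key, value)        # passthrough to SecretResourceApi.put_secret
    exists = await secrets.secret_exists(key)   # also a plain passthrough in this diff
    if not exists:
        raise RuntimeError(f"secret {key!r} was not persisted")


async def list_users_as_adapters(api_client: ApiClient) -> List[ConductorUserAdapter]:
    users_api = UserResourceApiAdapter(api_client)
    # list_users() converts the generated models via convert_list_to_adapter
    return await users_api.list_users()
```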
+class VersionResourceApiAdapter: + def __init__(self, api_client: ApiClient): + self._api = VersionResourceApi(api_client) + + async def get_version( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Get the server's version""" + return await self._api.get_version( + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) diff --git a/src/conductor/asyncio_client/adapters/api/webhooks_config_resource_api.py b/src/conductor/asyncio_client/adapters/api/webhooks_config_resource_api.py index eb3b9e0d7..a6dc48761 100644 --- a/src/conductor/asyncio_client/adapters/api/webhooks_config_resource_api.py +++ b/src/conductor/asyncio_client/adapters/api/webhooks_config_resource_api.py @@ -1,4 +1,209 @@ +from __future__ import annotations + +from typing import Annotated, Any, Dict, List, Optional, Tuple, Union + +from pydantic import Field, StrictFloat, StrictInt, StrictStr + +from conductor.asyncio_client.adapters import ApiClient +from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter +from conductor.asyncio_client.adapters.models.webhook_config_adapter import WebhookConfigAdapter +from conductor.asyncio_client.adapters.utils import convert_list_to_adapter, convert_to_adapter from conductor.asyncio_client.http.api import WebhooksConfigResourceApi -class WebhooksConfigResourceApiAdapter(WebhooksConfigResourceApi): ... 
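The small single-purpose resources follow the same wrapping: `TokenResourceApiAdapter` and `VersionResourceApiAdapter` simply forward to the generated client, and since their return types stay primitive (`object` / `str`) no adapter conversion is involved. A hedged sketch of how that might be exercised; constructing the `GenerateTokenRequestAdapter` is assumed to happen elsewhere, and `describe_server` is an illustrative name, not part of the SDK.

```python
from conductor.asyncio_client.adapters import ApiClient
from conductor.asyncio_client.adapters.api.token_resource_api import TokenResourceApiAdapter
from conductor.asyncio_client.adapters.api.version_resource_api import VersionResourceApiAdapter
from conductor.asyncio_client.adapters.models.generate_token_request_adapter import (
    GenerateTokenRequestAdapter,
)


async def describe_server(api_client: ApiClient, token_request: GenerateTokenRequestAdapter) -> str:
    # Both adapters are thin pass-throughs: no convert_*_to_adapter call is involved.
    token = await TokenResourceApiAdapter(api_client).generate_token(token_request)
    version = await VersionResourceApiAdapter(api_client).get_version()
    return f"server version {version}, token payload type {type(token).__name__}"
```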
+class WebhooksConfigResourceApiAdapter: + def __init__(self, api_client: ApiClient): + self._api = WebhooksConfigResourceApi(api_client) + + async def create_webhook( + self, + webhook_config: WebhookConfigAdapter, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> WebhookConfigAdapter: + """Create a webhook""" + result = await self._api.create_webhook( + webhook_config, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + return convert_to_adapter(result, WebhookConfigAdapter) + + async def delete_tag_for_webhook( + self, + id: StrictStr, + tag: List[TagAdapter], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Delete a tag for webhook id""" + await self._api.delete_tag_for_webhook( + id, + tag, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def delete_webhook( + self, + id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Delete a webhook id""" + await self._api.delete_webhook( + id, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def get_all_webhook( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> List[WebhookConfigAdapter]: + """Get all webhooks""" + result = await self._api.get_all_webhook( + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + return convert_list_to_adapter(result, WebhookConfigAdapter) + + async def get_tags_for_webhook( + self, + id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> List[TagAdapter]: + """Get tags by webhook id""" + 
result = await self._api.get_tags_for_webhook( + id, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + return convert_list_to_adapter(result, TagAdapter) + + async def get_webhook( + self, + id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> WebhookConfigAdapter: + """Get webhook by id""" + result = await self._api.get_webhook( + id, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + return convert_to_adapter(result, WebhookConfigAdapter) + + async def put_tag_for_webhook( + self, + id: StrictStr, + tag: List[TagAdapter], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Put a tag to webhook id""" + await self._api.put_tag_for_webhook( + id, + tag, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def update_webhook( + self, + id: StrictStr, + webhook_config: WebhookConfigAdapter, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> WebhookConfigAdapter: + """Update a webhook id""" + result = await self._api.update_webhook( + id, + webhook_config, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + return convert_to_adapter(result, WebhookConfigAdapter) diff --git a/src/conductor/asyncio_client/adapters/api/workflow_bulk_resource_api.py b/src/conductor/asyncio_client/adapters/api/workflow_bulk_resource_api.py index b1ae14379..bf66e9e26 100644 --- a/src/conductor/asyncio_client/adapters/api/workflow_bulk_resource_api.py +++ b/src/conductor/asyncio_client/adapters/api/workflow_bulk_resource_api.py @@ -1,4 +1,165 @@ +from __future__ import annotations + +from typing import Annotated, Any, Dict, List, Optional, Tuple, Union + +from pydantic import Field, StrictBool, StrictFloat, StrictInt, StrictStr + +from conductor.asyncio_client.adapters import ApiClient +from conductor.asyncio_client.adapters.models.bulk_response_adapter import BulkResponseAdapter +from conductor.asyncio_client.adapters.utils import convert_to_adapter from conductor.asyncio_client.http.api import WorkflowBulkResourceApi -class WorkflowBulkResourceApiAdapter(WorkflowBulkResourceApi): ... 
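`WebhooksConfigResourceApiAdapter` above applies the same conversion rules: single objects go through `convert_to_adapter(..., WebhookConfigAdapter)` and tag lists through `convert_list_to_adapter(..., TagAdapter)`. A short usage sketch, with tag construction left to the caller; it assumes `WebhookConfigAdapter` exposes the webhook's `id` field, which is not shown in this diff.

```python
from typing import List

from conductor.asyncio_client.adapters import ApiClient
from conductor.asyncio_client.adapters.api.webhooks_config_resource_api import (
    WebhooksConfigResourceApiAdapter,
)
from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter


async def tag_every_webhook(api_client: ApiClient, tags: List[TagAdapter]) -> int:
    webhooks_api = WebhooksConfigResourceApiAdapter(api_client)
    configs = await webhooks_api.get_all_webhook()   # List[WebhookConfigAdapter]
    for config in configs:
        # put_tag_for_webhook takes the webhook id plus a List[TagAdapter] and returns None.
        # `config.id` is an assumption about the adapter model's field name.
        await webhooks_api.put_tag_for_webhook(config.id, tags)
    return len(configs)
```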
+class WorkflowBulkResourceApiAdapter: + def __init__(self, api_client: ApiClient): + self._api = WorkflowBulkResourceApi(api_client) + + async def delete( + self, + request_body: List[StrictStr], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> BulkResponseAdapter: + """Permanently remove workflows from the system""" + result = await self._api.delete( + request_body, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + return convert_to_adapter(result, BulkResponseAdapter) + + async def pause_workflow1( + self, + request_body: List[StrictStr], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> BulkResponseAdapter: + """Pause the list of workflows""" + result = await self._api.pause_workflow1( + request_body, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + return convert_to_adapter(result, BulkResponseAdapter) + + async def restart1( + self, + request_body: List[StrictStr], + use_latest_definitions: Optional[StrictBool] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> BulkResponseAdapter: + """Restart the list of completed workflow""" + result = await self._api.restart1( + request_body, + use_latest_definitions, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + return convert_to_adapter(result, BulkResponseAdapter) + + async def resume_workflow1( + self, + request_body: List[StrictStr], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> BulkResponseAdapter: + """Resume the list of paused workflows""" + result = await self._api.resume_workflow1( + request_body, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + return convert_to_adapter(result, BulkResponseAdapter) + + async def retry1( + self, + request_body: List[StrictStr], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], 
Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> BulkResponseAdapter: + """Retry the last failed task for each workflow from the list""" + result = await self._api.retry1( + request_body, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + return convert_to_adapter(result, BulkResponseAdapter) + + async def terminate( + self, + request_body: List[StrictStr], + reason: Optional[StrictStr] = None, + trigger_failure_workflow: Optional[StrictBool] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> BulkResponseAdapter: + """Terminate the list of workflows""" + result = await self._api.terminate( + request_body, + reason, + trigger_failure_workflow, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + return convert_to_adapter(result, BulkResponseAdapter) diff --git a/src/conductor/asyncio_client/adapters/api/workflow_resource_api.py b/src/conductor/asyncio_client/adapters/api/workflow_resource_api.py index bce7a7f2e..90702ffe5 100644 --- a/src/conductor/asyncio_client/adapters/api/workflow_resource_api.py +++ b/src/conductor/asyncio_client/adapters/api/workflow_resource_api.py @@ -4,6 +4,7 @@ from pydantic import Field, StrictBool, StrictFloat, StrictInt, StrictStr, validate_call +from conductor.asyncio_client.adapters import ApiClient from conductor.asyncio_client.adapters.models.correlation_ids_search_request_adapter import ( CorrelationIdsSearchRequestAdapter, ) @@ -13,9 +14,18 @@ from conductor.asyncio_client.adapters.models.scrollable_search_result_workflow_summary_adapter import ( ScrollableSearchResultWorkflowSummaryAdapter, ) +from conductor.asyncio_client.adapters.models.skip_task_request_adapter import ( + SkipTaskRequestAdapter, +) from conductor.asyncio_client.adapters.models.start_workflow_request_adapter import ( StartWorkflowRequestAdapter, ) +from conductor.asyncio_client.adapters.models.task_list_search_result_summary_adapter import ( + TaskListSearchResultSummaryAdapter, +) +from conductor.asyncio_client.adapters.models.upgrade_workflow_request_adapter import ( + UpgradeWorkflowRequestAdapter, +) from conductor.asyncio_client.adapters.models.workflow_adapter import WorkflowAdapter from conductor.asyncio_client.adapters.models.workflow_run_adapter import WorkflowRunAdapter from conductor.asyncio_client.adapters.models.workflow_state_update_adapter import ( @@ -25,10 +35,66 @@ from conductor.asyncio_client.adapters.models.workflow_test_request_adapter import ( WorkflowTestRequestAdapter, ) +from conductor.asyncio_client.adapters.utils import ( + convert_dict_to_adapter, + convert_list_to_adapter, + convert_to_adapter, +) from conductor.asyncio_client.http.api import WorkflowResourceApi -class WorkflowResourceApiAdapter(WorkflowResourceApi): +class WorkflowResourceApiAdapter: + def __init__(self, api_client: ApiClient): + 
self._api = WorkflowResourceApi(api_client) + + async def decide( + self, + workflow_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Starts the decision task for a workflow""" + await self._api.decide( + workflow_id, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def delete1( + self, + workflow_id: StrictStr, + archive_workflow: Optional[StrictBool] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Removes the workflow from the system""" + await self._api.delete1( + workflow_id, + archive_workflow, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + async def execute_workflow( self, name: StrictStr, @@ -47,7 +113,8 @@ async def execute_workflow( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> WorkflowRunAdapter: - result = await super().execute_workflow( + """Execute a workflow synchronously""" + result = await self._api.execute_workflow( name, version, request_id, @@ -60,7 +127,79 @@ async def execute_workflow( _headers=_headers, _host_index=_host_index, ) - return result + return convert_to_adapter(result, WorkflowRunAdapter) + + async def execute_workflow_as_api( + self, + name: StrictStr, + request_body: Dict[str, Dict[str, Any]], + version: Optional[StrictInt] = None, + request_id: Optional[StrictStr] = None, + wait_until_task_ref: Optional[StrictStr] = None, + wait_for_seconds: Optional[StrictInt] = None, + x_idempotency_key: Optional[StrictStr] = None, + x_on_conflict: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Dict[str, object]: + """Execute a workflow synchronously with input and outputs""" + return await self._api.execute_workflow_as_api( + name, + request_body, + version, + request_id, + wait_until_task_ref, + wait_for_seconds, + x_idempotency_key, + x_on_conflict, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def execute_workflow_as_get_api( + self, + name: StrictStr, + version: Optional[StrictInt] = None, + request_id: Optional[StrictStr] = None, + wait_until_task_ref: Optional[StrictStr] = None, + wait_for_seconds: Optional[StrictInt] = None, + x_idempotency_key: Optional[StrictStr] = None, + x_on_conflict: 
Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Dict[str, object]: + """(Deprecated) Execute a workflow synchronously with input and outputs using get api""" + return await self._api.execute_workflow_as_get_api( + name, + version, + request_id, + wait_until_task_ref, + wait_for_seconds, + x_idempotency_key, + x_on_conflict, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) async def get_execution_status( self, @@ -77,7 +216,8 @@ async def get_execution_status( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> WorkflowAdapter: - result = await super().get_execution_status( + """Get the execution status of a workflow""" + result = await self._api.get_execution_status( workflow_id, include_tasks, summarize, @@ -87,7 +227,37 @@ async def get_execution_status( _headers=_headers, _host_index=_host_index, ) - return result + return convert_to_adapter(result, WorkflowAdapter) + + async def get_execution_status_task_list( + self, + workflow_id: StrictStr, + start: Optional[StrictInt] = None, + count: Optional[StrictInt] = None, + status: Optional[List[StrictStr]] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> TaskListSearchResultSummaryAdapter: + """Get the execution status of a workflow's task list""" + result = await self._api.get_execution_status_task_list( + workflow_id, + start, + count, + status, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + return convert_to_adapter(result, TaskListSearchResultSummaryAdapter) async def get_workflow_status_summary( self, @@ -104,7 +274,8 @@ async def get_workflow_status_summary( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> WorkflowStatusAdapter: - result = await super().get_workflow_status_summary( + """Get the status summary of a workflow""" + result = await self._api.get_workflow_status_summary( workflow_id, include_output, include_variables, @@ -114,7 +285,7 @@ async def get_workflow_status_summary( _headers=_headers, _host_index=_host_index, ) - return result + return convert_to_adapter(result, WorkflowStatusAdapter) async def get_running_workflow( self, @@ -132,7 +303,8 @@ async def get_running_workflow( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> List[str]: - return await super().get_running_workflow( + """Get the running workflows""" + return await self._api.get_running_workflow( name, version, start_time, @@ -160,7 +332,8 @@ async def get_workflows( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 
0, ) -> Dict[str, List[WorkflowAdapter]]: - result = await super().get_workflows( + """Get the workflows""" + result = await self._api.get_workflows( name, request_body, include_closed, @@ -171,7 +344,7 @@ async def get_workflows( _headers=_headers, _host_index=_host_index, ) - return result + return convert_dict_to_adapter(result, WorkflowAdapter) async def get_workflows1( self, @@ -188,7 +361,8 @@ async def get_workflows1( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> Dict[str, List[WorkflowAdapter]]: - result = await super().get_workflows1( + """Get the workflows""" + result = await self._api.get_workflows1( correlation_ids_search_request, include_closed, include_tasks, @@ -198,7 +372,87 @@ async def get_workflows1( _headers=_headers, _host_index=_host_index, ) - return result + return convert_dict_to_adapter(result, WorkflowAdapter) + + async def get_workflows2( + self, + name: StrictStr, + correlation_id: StrictStr, + include_closed: Optional[StrictBool] = None, + include_tasks: Optional[StrictBool] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> List[WorkflowAdapter]: + """Get the workflows""" + result = await self._api.get_workflows2( + name, + correlation_id, + include_closed, + include_tasks, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + return convert_list_to_adapter(result, WorkflowAdapter) + + async def jump_to_task( + self, + workflow_id: StrictStr, + task_reference_name: StrictStr, + request_body: Dict[str, Dict[str, Any]], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Jump to task""" + await self._api.jump_to_task( + workflow_id, + task_reference_name, + request_body, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def pause_workflow( + self, + workflow_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Pauses the workflow""" + await self._api.pause_workflow( + workflow_id, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) async def search( self, @@ -218,7 +472,8 @@ async def search( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> ScrollableSearchResultWorkflowSummaryAdapter: - result = 
await super().search( + """Search for workflows""" + result = await self._api.search( start, size, sort, @@ -231,7 +486,57 @@ async def search( _headers=_headers, _host_index=_host_index, ) - return result + return convert_to_adapter(result, ScrollableSearchResultWorkflowSummaryAdapter) + + async def skip_task_from_workflow( + self, + workflow_id: StrictStr, + task_reference_name: StrictStr, + skip_task_request: SkipTaskRequestAdapter, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Skips a given task from a current running workflow""" + await self._api.skip_task_from_workflow( + workflow_id, + task_reference_name, + skip_task_request, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def start_workflow( + self, + start_workflow_request: StartWorkflowRequestAdapter, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Start a new workflow with StartWorkflowRequest, which allows task to be executed in a domain""" + return await self._api.start_workflow( + start_workflow_request, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) async def rerun( self, @@ -247,7 +552,8 @@ async def rerun( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> str: - return await super().rerun( + """Rerun a workflow""" + return await self._api.rerun( workflow_id, rerun_workflow_request, _request_timeout=_request_timeout, @@ -257,6 +563,29 @@ async def rerun( _host_index=_host_index, ) + async def reset_workflow( + self, + workflow_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Resets callback times of all non-terminal SIMPLE tasks to 0""" + await self._api.reset_workflow( + workflow_id, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + async def restart( self, workflow_id: StrictStr, @@ -271,7 +600,8 @@ async def restart( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> None: - await super().restart( + """Restart a workflow""" + await self._api.restart( workflow_id, use_latest_definitions, _request_timeout=_request_timeout, @@ -281,6 +611,56 @@ async def restart( _host_index=_host_index, ) + async def resume_workflow( + self, + workflow_id: 
StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Resumes the workflow""" + await self._api.resume_workflow( + workflow_id, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + async def retry( + self, + workflow_id: StrictStr, + resume_subworkflow_tasks: Optional[StrictBool] = None, + retry_if_retried_by_parent: Optional[StrictBool] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Retries the last failed task""" + await self._api.retry( + workflow_id, + resume_subworkflow_tasks, + retry_if_retried_by_parent, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + async def update_workflow_and_task_state( self, workflow_id: StrictStr, @@ -298,7 +678,8 @@ async def update_workflow_and_task_state( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> WorkflowRunAdapter: - result = await super().update_workflow_and_task_state( + """Update the workflow and task state""" + result = await self._api.update_workflow_and_task_state( workflow_id, request_id, workflow_state_update, @@ -310,22 +691,23 @@ async def update_workflow_and_task_state( _headers=_headers, _host_index=_host_index, ) - return result + return convert_to_adapter(result, WorkflowRunAdapter) async def test_workflow( self, workflow_test_request: WorkflowTestRequestAdapter, - _request_timeout: Union[ # noqa: PT019 + _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, # noqa: PT019 - _content_type: Optional[StrictStr] = None, # noqa: PT019 - _headers: Optional[Dict[StrictStr, Any]] = None, # noqa: PT019 - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, # noqa: PT019 + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> WorkflowAdapter: - result = await super().test_workflow( + """Test a workflow""" + result = await self._api.test_workflow( workflow_test_request, _request_timeout=_request_timeout, _request_auth=_request_auth, @@ -333,13 +715,12 @@ async def test_workflow( _headers=_headers, _host_index=_host_index, ) - return result + return convert_to_adapter(result, WorkflowAdapter) - @validate_call async def update_workflow_state( self, workflow_id: StrictStr, - request_body: Dict[str, Any], + request_body: Dict[str, Dict[str, Any]], _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -350,54 +731,42 @@ async def 
update_workflow_state( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> WorkflowAdapter: - """Update workflow variables - - Updates the workflow variables and triggers evaluation. - - :param workflow_id: (required) - :type workflow_id: str - :param request_body: (required) - :type request_body: Dict[str, object] - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ - - _param = self._update_workflow_state_serialize( - workflow_id=workflow_id, - request_body=request_body, + """Update the workflow state""" + result = await self._api.update_workflow_state( + workflow_id, + request_body, + _request_timeout=_request_timeout, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, _host_index=_host_index, ) + return convert_to_adapter(result, WorkflowAdapter) - _response_types_map: Dict[str, Optional[str]] = { - "200": "Workflow", - } - response_data = await self.api_client.call_api(*_param, _request_timeout=_request_timeout) - await response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data + async def upgrade_running_workflow_to_version( + self, + workflow_id: StrictStr, + upgrade_workflow_request: UpgradeWorkflowRequestAdapter, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Upgrade running workflow to newer version""" + await self._api.upgrade_running_workflow_to_version( + workflow_id, + upgrade_workflow_request, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) @validate_call async def start_workflow1( @@ -425,7 +794,7 @@ async def start_workflow1( :param name: (required) :type name: str :param request_body: (required) - :type request_body: Dict[str, object] + :type request_body: Dict[str, Any] :param version: :type version: int :param correlation_id: @@ -458,7 +827,7 @@ async def start_workflow1( :return: Returns the result object. 
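        Example (a minimal usage sketch; assumes an already-initialized
        ``WorkflowResourceApiAdapter`` instance named ``workflow_api`` and an
        illustrative workflow definition name and payload):

        ```python
        # Start "my_workflow" by name with an input payload; the call resolves to the
        # id of the new workflow execution as a plain string.
        workflow_id = await workflow_api.start_workflow1(
            name="my_workflow",
            request_body={"orderId": "123"},
            version=1,
            correlation_id="order-123",
        )
        print(f"Started workflow: {workflow_id}")
        ```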
""" - _param = self._start_workflow1_serialize( + _param = self._api._start_workflow1_serialize( name=name, request_body=request_body, version=version, @@ -475,9 +844,38 @@ async def start_workflow1( _response_types_map: Dict[str, Optional[str]] = { "200": "str", } - response_data = await self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data = await self._api.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) await response_data.read() - return self.api_client.response_deserialize( + return self._api.api_client.response_deserialize( response_data=response_data, response_types_map=_response_types_map, ).data + + async def terminate1( + self, + workflow_id: StrictStr, + reason: Optional[StrictStr] = None, + trigger_failure_workflow: Optional[StrictBool] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Terminate workflow execution""" + await self._api.terminate1( + workflow_id, + reason, + trigger_failure_workflow, + _request_timeout=_request_timeout, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) diff --git a/src/conductor/asyncio_client/adapters/models/__init__.py b/src/conductor/asyncio_client/adapters/models/__init__.py index 3e97d2d6d..1e04fd4cb 100644 --- a/src/conductor/asyncio_client/adapters/models/__init__.py +++ b/src/conductor/asyncio_client/adapters/models/__init__.py @@ -23,12 +23,12 @@ from conductor.asyncio_client.adapters.models.connectivity_test_result_adapter import ( ConnectivityTestResultAdapter as ConnectivityTestResult, ) -from conductor.asyncio_client.adapters.models.create_or_update_application_request_adapter import ( - CreateOrUpdateApplicationRequestAdapter as CreateOrUpdateApplicationRequest, -) from conductor.asyncio_client.adapters.models.correlation_ids_search_request_adapter import ( CorrelationIdsSearchRequestAdapter as CorrelationIdsSearchRequest, ) +from conductor.asyncio_client.adapters.models.create_or_update_application_request_adapter import ( + CreateOrUpdateApplicationRequestAdapter as CreateOrUpdateApplicationRequest, +) from conductor.asyncio_client.adapters.models.declaration_adapter import ( DeclarationAdapter as Declaration, ) @@ -415,7 +415,6 @@ WorkflowTestRequestAdapter as WorkflowTestRequest, ) - __all__ = [ "Action", "Any", diff --git a/src/conductor/asyncio_client/adapters/models/access_key_adapter.py b/src/conductor/asyncio_client/adapters/models/access_key_adapter.py new file mode 100644 index 000000000..ca578fc69 --- /dev/null +++ b/src/conductor/asyncio_client/adapters/models/access_key_adapter.py @@ -0,0 +1,22 @@ +from typing import Any, Dict + +from pydantic import BaseModel, Field + +from conductor.shared.http.enums import AccessKeyStatus + + +class AccessKeyAdapter(BaseModel): + id: str + status: AccessKeyStatus + created_at: int = Field(alias="createdAt") + + @classmethod + def from_dict(cls, obj: Dict[str, Any]) -> "AccessKeyAdapter": + _obj = cls.model_validate( + { + "id": obj.get("id"), + "status": obj.get("status"), + "createdAt": obj.get("createdAt"), + } + ) + return _obj diff --git a/src/conductor/asyncio_client/adapters/models/created_access_key_adapter.py 
b/src/conductor/asyncio_client/adapters/models/created_access_key_adapter.py new file mode 100644 index 000000000..b1e6fab07 --- /dev/null +++ b/src/conductor/asyncio_client/adapters/models/created_access_key_adapter.py @@ -0,0 +1,15 @@ +from typing import Any, Dict + +from pydantic import BaseModel + + +class CreatedAccessKeyAdapter(BaseModel): + id: str + secret: str + + @classmethod + def from_dict(cls, obj: Dict[str, Any]) -> "CreatedAccessKeyAdapter": + return cls( + id=obj["id"], + secret=obj["secret"], + ) diff --git a/src/conductor/asyncio_client/adapters/models/workflow_run_adapter.py b/src/conductor/asyncio_client/adapters/models/workflow_run_adapter.py index b72c78af2..9993a5a03 100644 --- a/src/conductor/asyncio_client/adapters/models/workflow_run_adapter.py +++ b/src/conductor/asyncio_client/adapters/models/workflow_run_adapter.py @@ -14,16 +14,16 @@ class WorkflowRunAdapter(WorkflowRun): variables: Optional[Dict[str, Any]] = None @property - def current_task(self) -> TaskAdapter: + def current_task(self) -> TaskAdapter: # type: ignore[override] current = None - for task in self.tasks: + for task in self.tasks or []: if task.status in ("SCHEDULED", "IN_PROGRESS"): current = task - return current + return current # type: ignore[return-value] def get_task( self, name: Optional[str] = None, task_reference_name: Optional[str] = None - ) -> TaskAdapter: + ) -> TaskAdapter: # type: ignore[override] if name is None and task_reference_name is None: raise Exception( "ONLY one of name or task_reference_name MUST be provided. None were provided" @@ -34,13 +34,13 @@ def get_task( ) current = None - for task in self.tasks: - if ( - task.task_def_name == name - or task.workflow_task.task_reference_name == task_reference_name + for task in self.tasks or []: + if task.task_def_name == name or ( + task.workflow_task is not None + and task.workflow_task.task_reference_name == task_reference_name ): current = task - return current + return current # type: ignore[return-value] @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: diff --git a/src/conductor/asyncio_client/adapters/models/workflow_schedule_adapter.py b/src/conductor/asyncio_client/adapters/models/workflow_schedule_adapter.py index 941b6cf55..f6ec63764 100644 --- a/src/conductor/asyncio_client/adapters/models/workflow_schedule_adapter.py +++ b/src/conductor/asyncio_client/adapters/models/workflow_schedule_adapter.py @@ -9,7 +9,7 @@ class WorkflowScheduleAdapter(WorkflowSchedule): - start_workflow_request: Optional["StartWorkflowRequestAdapter"] = Field( + start_workflow_request: Optional["StartWorkflowRequestAdapter"] = Field( # type: ignore[override] default=None, alias="startWorkflowRequest" ) tags: Optional[List["TagAdapter"]] = None diff --git a/src/conductor/asyncio_client/adapters/models/workflow_schedule_model_adapter.py b/src/conductor/asyncio_client/adapters/models/workflow_schedule_model_adapter.py index e0d3963fc..635843c87 100644 --- a/src/conductor/asyncio_client/adapters/models/workflow_schedule_model_adapter.py +++ b/src/conductor/asyncio_client/adapters/models/workflow_schedule_model_adapter.py @@ -9,7 +9,7 @@ class WorkflowScheduleModelAdapter(WorkflowScheduleModel): - start_workflow_request: Optional["StartWorkflowRequestAdapter"] = Field( + start_workflow_request: Optional["StartWorkflowRequestAdapter"] = Field( # type: ignore[override] default=None, alias="startWorkflowRequest" ) tags: Optional[List["TagAdapter"]] = None diff --git a/src/conductor/asyncio_client/adapters/utils.py 
b/src/conductor/asyncio_client/adapters/utils.py new file mode 100644 index 000000000..9423e1773 --- /dev/null +++ b/src/conductor/asyncio_client/adapters/utils.py @@ -0,0 +1,53 @@ +""" +Utility functions for converting between generated models and adapters. +""" + +from typing import Any, Dict, List, Type, TypeVar + +from pydantic import BaseModel + +T = TypeVar("T", bound=BaseModel) + + +def convert_list_to_adapter(items: List[Any], adapter_class: Type[T]) -> List[T]: + """ + Convert a list of generated models to a list of adapters. + + Args: + items: List of generated model instances + adapter_class: The adapter class to convert to + + Returns: + List of adapter instances + """ + return [adapter_class.model_validate(item.model_dump()) for item in items] + + +def convert_to_adapter(item: Any, adapter_class: Type[T]) -> T: + """ + Convert a single generated model to an adapter. + + Args: + item: Generated model instance + adapter_class: The adapter class to convert to + + Returns: + Adapter instance + """ + return adapter_class.model_validate(item.model_dump()) + + +def convert_dict_to_adapter( + input_dict: Dict[str, List[Any]], adapter_class: Type[T] +) -> Dict[str, List[T]]: + """ + Convert a dictionary of model lists to a dictionary of adapter lists. + + Args: + input_dict: Dictionary mapping string keys to lists of model instances + adapter_class: The adapter class to convert to + + Returns: + Dictionary mapping string keys to lists of adapter instances + """ + return {key: convert_list_to_adapter(items, adapter_class) for key, items in input_dict.items()} diff --git a/src/conductor/asyncio_client/configuration/configuration.py b/src/conductor/asyncio_client/configuration/configuration.py index 55102039a..3f4a95acd 100644 --- a/src/conductor/asyncio_client/configuration/configuration.py +++ b/src/conductor/asyncio_client/configuration/configuration.py @@ -134,7 +134,7 @@ def __init__( if server_url is not None: self.server_url = server_url else: - self.server_url = os.getenv("CONDUCTOR_SERVER_URL") + self.server_url = os.getenv("CONDUCTOR_SERVER_URL") # type: ignore[assignment] if self.server_url is None or self.server_url == "": self.server_url = "http://localhost:8080/api" @@ -143,12 +143,12 @@ def __init__( if auth_key is not None: self.auth_key = auth_key else: - self.auth_key = os.getenv("CONDUCTOR_AUTH_KEY") + self.auth_key = os.getenv("CONDUCTOR_AUTH_KEY") # type: ignore[assignment] if auth_secret is not None: self.auth_secret = auth_secret else: - self.auth_secret = os.getenv("CONDUCTOR_AUTH_SECRET") + self.auth_secret = os.getenv("CONDUCTOR_AUTH_SECRET") # type: ignore[assignment] # Additional worker properties with environment variable fallback self.polling_interval = polling_interval or self._get_env_int( @@ -176,7 +176,7 @@ def __init__( self.proxy_headers: Optional[Dict[str, Any]] = proxy_headers if not self.proxy_headers and os.getenv("CONDUCTOR_PROXY_HEADERS"): try: - self.proxy_headers = json.loads(os.getenv("CONDUCTOR_PROXY_HEADERS")) + self.proxy_headers = json.loads(os.getenv("CONDUCTOR_PROXY_HEADERS")) # type: ignore[arg-type] except (json.JSONDecodeError, TypeError): # If JSON parsing fails, treat as a single header value self.proxy_headers = {"Authorization": os.getenv("CONDUCTOR_PROXY_HEADERS")} @@ -213,19 +213,19 @@ def __init__( http_config_kwargs["verify_ssl"] = self._get_env_bool("CONDUCTOR_VERIFY_SSL", True) http_config_kwargs.update(kwargs) - self._http_config = HttpConfiguration(**http_config_kwargs) + self._http_config = 
HttpConfiguration(**http_config_kwargs) # type: ignore[arg-type] # Set proxy configuration on the HTTP config if self.proxy: self._http_config.proxy = self.proxy if self.proxy_headers: - self._http_config.proxy_headers = self.proxy_headers + self._http_config.proxy_headers = self.proxy_headers # type: ignore[assignment] # Set proxy configuration on the HTTP config if self.proxy: self._http_config.proxy = self.proxy if self.proxy_headers: - self._http_config.proxy_headers = self.proxy_headers + self._http_config.proxy_headers = self.proxy_headers # type: ignore[assignment] # Set proxy configuration on the HTTP config if self.proxy: diff --git a/src/conductor/asyncio_client/event/event_client.py b/src/conductor/asyncio_client/event/event_client.py index 879cb563f..82218730f 100644 --- a/src/conductor/asyncio_client/event/event_client.py +++ b/src/conductor/asyncio_client/event/event_client.py @@ -29,7 +29,7 @@ async def get_queue_configuration(self, queue_type: str, queue_name: str): async def put_queue_configuration(self, queue_configuration: QueueConfiguration): return await self.client.put_queue_config( - body=queue_configuration.get_worker_configuration(), + body=queue_configuration.get_worker_configuration(), # type: ignore[arg-type] queue_name=queue_configuration.queue_name, queue_type=queue_configuration.queue_type, ) diff --git a/src/conductor/asyncio_client/orkes/orkes_authorization_client.py b/src/conductor/asyncio_client/orkes/orkes_authorization_client.py index b06e915db..3f5a5672b 100644 --- a/src/conductor/asyncio_client/orkes/orkes_authorization_client.py +++ b/src/conductor/asyncio_client/orkes/orkes_authorization_client.py @@ -2,9 +2,13 @@ from typing import Any, Dict, List, Optional, cast +from deprecated import deprecated +from typing_extensions import deprecated as typing_deprecated + from conductor.asyncio_client.adapters import ApiClient +from conductor.asyncio_client.adapters.models.access_key_adapter import AccessKeyAdapter from conductor.asyncio_client.adapters.models.authorization_request_adapter import ( - AuthorizationRequestAdapter as AuthorizationRequest, + AuthorizationRequestAdapter, ) from conductor.asyncio_client.adapters.models.conductor_user_adapter import ( ConductorUserAdapter, @@ -12,214 +16,1315 @@ from conductor.asyncio_client.adapters.models.create_or_update_application_request_adapter import ( CreateOrUpdateApplicationRequestAdapter, ) +from conductor.asyncio_client.adapters.models.created_access_key_adapter import ( + CreatedAccessKeyAdapter, +) from conductor.asyncio_client.adapters.models.extended_conductor_application_adapter import ( ExtendedConductorApplicationAdapter, ) from conductor.asyncio_client.adapters.models.granted_access_adapter import ( - GrantedAccessAdapter as GrantedAccess, + GrantedAccessAdapter, +) +from conductor.asyncio_client.adapters.models.granted_access_response_adapter import ( + GrantedAccessResponseAdapter, ) from conductor.asyncio_client.adapters.models.group_adapter import GroupAdapter +from conductor.asyncio_client.adapters.models.subject_ref_adapter import SubjectRefAdapter from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter from conductor.asyncio_client.adapters.models.target_ref_adapter import ( - TargetRefAdapter as TargetRef, + TargetRefAdapter, ) from conductor.asyncio_client.adapters.models.upsert_group_request_adapter import ( - UpsertGroupRequestAdapter as UpsertGroupRequest, + UpsertGroupRequestAdapter, ) from conductor.asyncio_client.adapters.models.upsert_user_request_adapter 
import ( - UpsertUserRequestAdapter as UpsertUserRequest, + UpsertUserRequestAdapter, ) from conductor.asyncio_client.configuration.configuration import Configuration from conductor.asyncio_client.orkes.orkes_base_client import OrkesBaseClient -from conductor.client.orkes.models.access_key import AccessKey class OrkesAuthorizationClient(OrkesBaseClient): def __init__(self, configuration: Configuration, api_client: ApiClient): + """Initialize the OrkesAuthorizationClient with configuration and API client. + + Args: + configuration: Configuration object containing server settings and authentication + api_client: ApiClient instance for making API requests + + Example: + ```python + from conductor.asyncio_client.configuration.configuration import Configuration + from conductor.asyncio_client.adapters import ApiClient + + config = Configuration(server_api_url="http://localhost:8080/api") + api_client = ApiClient(configuration=config) + auth_client = OrkesAuthorizationClient(config, api_client) + ``` + """ super().__init__(configuration, api_client) # User Operations + @deprecated("create_user is deprecated; use create_user_validated instead") + @typing_deprecated("create_user is deprecated; use create_user_validated instead") async def create_user( - self, user_id: str, upsert_user_request: UpsertUserRequest - ) -> ConductorUserAdapter: - """Create a new user""" - return await self.user_api.upsert_user(id=user_id, upsert_user_request=upsert_user_request) + self, user_id: str, upsert_user_request: UpsertUserRequestAdapter + ) -> object: + """Create a new user. + + .. deprecated:: + Use create_user_validated instead for type-safe validated responses. + + Args: + user_id: Unique identifier for the user + upsert_user_request: User details including name, roles, and groups + + Returns: + Raw response object from the API + + Example: + ```python + from conductor.asyncio_client.adapters.models.upsert_user_request_adapter import UpsertUserRequestAdapter + + request = UpsertUserRequestAdapter(name="John Doe", roles=["USER"]) + await auth_client.create_user("john.doe@example.com", request) + ``` + """ + return await self._user_api.upsert_user(id=user_id, upsert_user_request=upsert_user_request) + + async def create_user_validated( + self, user_id: str, upsert_user_request: UpsertUserRequestAdapter, **kwargs + ) -> Optional[ConductorUserAdapter]: + """Create a new user and return a validated ConductorUserAdapter. 
+ + Args: + user_id: Unique identifier for the user (typically email address) + upsert_user_request: User details including name, roles, and groups + **kwargs: Additional optional parameters to pass to the API + + Returns: + ConductorUserAdapter instance containing the created user details, or None if creation failed + + Example: + ```python + from conductor.asyncio_client.adapters.models.upsert_user_request_adapter import UpsertUserRequestAdapter + + request = UpsertUserRequestAdapter( + name="John Doe", + roles=["USER"], + groups=["engineering"] + ) + user = await auth_client.create_user_validated("john.doe@example.com", request) + print(f"Created user: {user.name}") + ``` + """ + result = await self._user_api.upsert_user( + id=user_id, upsert_user_request=upsert_user_request, **kwargs + ) + result_dict = cast(Dict[str, Any], result) + result_model = ConductorUserAdapter.from_dict(result_dict) + + return result_model + + @deprecated("update_user is deprecated; use update_user_validated instead") + @typing_deprecated("update_user is deprecated; use update_user_validated instead") async def update_user( - self, user_id: str, upsert_user_request: UpsertUserRequest - ) -> ConductorUserAdapter: - """Update an existing user""" - return await self.user_api.upsert_user(id=user_id, upsert_user_request=upsert_user_request) + self, user_id: str, upsert_user_request: UpsertUserRequestAdapter + ) -> object: + """Update an existing user. + + .. deprecated:: + Use update_user_validated instead for type-safe validated responses. + + Args: + user_id: Unique identifier for the user to update + upsert_user_request: Updated user details + + Returns: + Raw response object from the API + + Example: + ```python + from conductor.asyncio_client.adapters.models.upsert_user_request_adapter import UpsertUserRequestAdapter + + request = UpsertUserRequestAdapter(name="John Smith", roles=["USER", "ADMIN"]) + await auth_client.update_user("john.doe@example.com", request) + ``` + """ + return await self._user_api.upsert_user(id=user_id, upsert_user_request=upsert_user_request) + + async def update_user_validated( + self, user_id: str, upsert_user_request: UpsertUserRequestAdapter, **kwargs + ) -> Optional[ConductorUserAdapter]: + """Update an existing user and return a validated ConductorUserAdapter. + + Args: + user_id: Unique identifier for the user to update + upsert_user_request: Updated user details including name, roles, and groups + **kwargs: Additional optional parameters to pass to the API + + Returns: + ConductorUserAdapter instance containing the updated user details, or None if update failed + + Example: + ```python + from conductor.asyncio_client.adapters.models.upsert_user_request_adapter import UpsertUserRequestAdapter + + request = UpsertUserRequestAdapter( + name="John Smith", + roles=["USER", "ADMIN"] + ) + user = await auth_client.update_user_validated("john.doe@example.com", request) + print(f"Updated user: {user.name}") + ``` + """ + result = await self._user_api.upsert_user( + id=user_id, upsert_user_request=upsert_user_request, **kwargs + ) + + result_dict = cast(Dict[str, Any], result) + result_model = ConductorUserAdapter.from_dict(result_dict) + + return result_model + + async def get_user(self, user_id: str, **kwargs) -> Optional[ConductorUserAdapter]: + """Get a user by ID and return a validated ConductorUserAdapter. 
- async def get_user(self, user_id: str) -> Optional[ConductorUserAdapter]: - """Get user by ID""" - result = await self.user_api.get_user(id=user_id) - return ConductorUserAdapter.from_dict(result) + Args: + user_id: Unique identifier for the user to retrieve + **kwargs: Additional optional parameters to pass to the API - async def delete_user(self, user_id: str) -> None: - """Delete user by ID""" - await self.user_api.delete_user(id=user_id) + Returns: + ConductorUserAdapter instance containing the user details, or None if user not found - async def list_users(self, include_apps: bool = False) -> List[ConductorUserAdapter]: - """List all users""" - return await self.user_api.list_users(apps=include_apps) + Example: + ```python + user = await auth_client.get_user("john.doe@example.com") + if user: + print(f"User: {user.name}, Roles: {user.roles}") + ``` + """ + result = await self._user_api.get_user(id=user_id, **kwargs) + + result_dict = cast(Dict[str, Any], result) + result_model = ConductorUserAdapter.from_dict(result_dict) + + return result_model + + async def delete_user(self, user_id: str, **kwargs) -> None: + """Delete a user by ID. + + Args: + user_id: Unique identifier for the user to delete + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + await auth_client.delete_user("john.doe@example.com") + ``` + """ + await self._user_api.delete_user(id=user_id, **kwargs) + + async def list_users(self, include_apps: bool = False, **kwargs) -> List[ConductorUserAdapter]: + """List all users in the system. + + Args: + include_apps: If True, include application users in the result. Defaults to False + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of ConductorUserAdapter instances representing all users + + Example: + ```python + # List only regular users + users = await auth_client.list_users() + for user in users: + print(f"User: {user.name}") + + # Include application users + all_users = await auth_client.list_users(include_apps=True) + ``` + """ + return await self._user_api.list_users(apps=include_apps, **kwargs) + + async def get_user_permissions( + self, user_id: str, **kwargs + ) -> Optional[GrantedAccessResponseAdapter]: + """Get permissions granted to a user. + + Args: + user_id: Unique identifier for the user + **kwargs: Additional optional parameters to pass to the API + + Returns: + GrantedAccessResponseAdapter containing all permissions granted to the user, or None if user not found + + Example: + ```python + permissions = await auth_client.get_user_permissions("john.doe@example.com") + if permissions and permissions.granted_access: + for access in permissions.granted_access: + print(f"Access: {access.access} on {access.target.type}:{access.target.id}") + ``` + """ + result = await self._user_api.get_granted_permissions(user_id, **kwargs) + + result_dict = cast(Dict[str, Any], result) + result_model = GrantedAccessResponseAdapter.from_dict(result_dict) + + return result_model # Application Operations async def create_application( - self, application: CreateOrUpdateApplicationRequestAdapter + self, + create_or_update_application_request: CreateOrUpdateApplicationRequestAdapter, + **kwargs, ) -> Optional[ExtendedConductorApplicationAdapter]: - """Create a new application""" - result = await self.application_api.create_application( - create_or_update_application_request=application + """Create a new application and return a validated ExtendedConductorApplicationAdapter. 
+ + Args: + create_or_update_application_request: Application details including name and owner + **kwargs: Additional optional parameters to pass to the API + + Returns: + ExtendedConductorApplicationAdapter instance containing the created application details, or None if creation failed + + Example: + ```python + from conductor.asyncio_client.adapters.models.create_or_update_application_request_adapter import CreateOrUpdateApplicationRequestAdapter + + request = CreateOrUpdateApplicationRequestAdapter( + name="My Application", + owner="engineering-team" + ) + app = await auth_client.create_application(request) + print(f"Created application: {app.name} with ID: {app.id}") + ``` + """ + result = await self._application_api.create_application( + create_or_update_application_request=create_or_update_application_request, **kwargs ) - return ExtendedConductorApplicationAdapter.from_dict(result) + + result_dict = cast(Dict[str, Any], result) + result_model = ExtendedConductorApplicationAdapter.from_dict(result_dict) + + return result_model async def update_application( - self, application_id: str, application: CreateOrUpdateApplicationRequestAdapter + self, + application_id: str, + create_or_update_application_request: CreateOrUpdateApplicationRequestAdapter, + **kwargs, ) -> Optional[ExtendedConductorApplicationAdapter]: - """Update an existing application""" - result = await self.application_api.update_application( - id=application_id, create_or_update_application_request=application + """Update an existing application and return a validated ExtendedConductorApplicationAdapter. + + Args: + application_id: Unique identifier for the application to update + create_or_update_application_request: Updated application details + **kwargs: Additional optional parameters to pass to the API + + Returns: + ExtendedConductorApplicationAdapter instance containing the updated application details, or None if update failed + + Example: + ```python + from conductor.asyncio_client.adapters.models.create_or_update_application_request_adapter import CreateOrUpdateApplicationRequestAdapter + + request = CreateOrUpdateApplicationRequestAdapter( + name="Updated Application Name", + owner="new-owner-team" + ) + app = await auth_client.update_application("app-123", request) + print(f"Updated application: {app.name}") + ``` + """ + result = await self._application_api.update_application( + id=application_id, + create_or_update_application_request=create_or_update_application_request, + **kwargs, ) - return ExtendedConductorApplicationAdapter.from_dict(result) + + result_dict = cast(Dict[str, Any], result) + result_model = ExtendedConductorApplicationAdapter.from_dict(result_dict) + + return result_model async def get_application( - self, application_id: str + self, application_id: str, **kwargs ) -> Optional[ExtendedConductorApplicationAdapter]: - """Get application by ID""" - result = await self.application_api.get_application(id=application_id) - return ExtendedConductorApplicationAdapter.from_dict(result) + """Get an application by ID and return a validated ExtendedConductorApplicationAdapter. 
+ + Args: + application_id: Unique identifier for the application to retrieve + **kwargs: Additional optional parameters to pass to the API + + Returns: + ExtendedConductorApplicationAdapter instance containing the application details, or None if not found + + Example: + ```python + app = await auth_client.get_application("app-123") + if app: + print(f"Application: {app.name}, Owner: {app.owner}") + ``` + """ + result = await self._application_api.get_application(id=application_id, **kwargs) + + result_dict = cast(Dict[str, Any], result) + result_model = ExtendedConductorApplicationAdapter.from_dict(result_dict) + + return result_model - async def delete_application(self, application_id: str) -> None: - """Delete application by ID""" - await self.application_api.delete_application(id=application_id) + async def delete_application(self, application_id: str, **kwargs) -> None: + """Delete an application by ID. - async def list_applications(self) -> List[ExtendedConductorApplicationAdapter]: - """List all applications""" - return await self.application_api.list_applications() + Args: + application_id: Unique identifier for the application to delete + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + await auth_client.delete_application("app-123") + ``` + """ + await self._application_api.delete_application(id=application_id, **kwargs) + + async def list_applications(self, **kwargs) -> List[ExtendedConductorApplicationAdapter]: + """List all applications in the system. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of ExtendedConductorApplicationAdapter instances representing all applications + + Example: + ```python + apps = await auth_client.list_applications() + for app in apps: + print(f"Application: {app.name}, ID: {app.id}") + ``` + """ + return await self._application_api.list_applications(**kwargs) # Group Operations + @deprecated("create_group is deprecated; use create_group_validated instead") + @typing_deprecated("create_group is deprecated; use create_group_validated instead") async def create_group( - self, group_id: str, upsert_group_request: UpsertGroupRequest - ) -> GroupAdapter: - """Create a new group""" - return await self.group_api.upsert_group( + self, group_id: str, upsert_group_request: UpsertGroupRequestAdapter + ) -> object: + """Create a new group. + + .. deprecated:: + Use create_group_validated instead for type-safe validated responses. + + Args: + group_id: Unique identifier for the group + upsert_group_request: Group details including description and roles + + Returns: + Raw response object from the API + + Example: + ```python + from conductor.asyncio_client.adapters.models.upsert_group_request_adapter import UpsertGroupRequestAdapter + + request = UpsertGroupRequestAdapter(description="Engineering team", roles=["WORKER"]) + await auth_client.create_group("engineering", request) + ``` + """ + return await self._group_api.upsert_group( id=group_id, upsert_group_request=upsert_group_request ) + async def create_group_validated( + self, group_id: str, upsert_group_request: UpsertGroupRequestAdapter, **kwargs + ) -> Optional[GroupAdapter]: + """Create a new group and return a validated GroupAdapter. 
+ + Args: + group_id: Unique identifier for the group + upsert_group_request: Group details including description and roles + **kwargs: Additional optional parameters to pass to the API + + Returns: + GroupAdapter instance containing the created group details, or None if creation failed + + Example: + ```python + from conductor.asyncio_client.adapters.models.upsert_group_request_adapter import UpsertGroupRequestAdapter + + request = UpsertGroupRequestAdapter( + description="Engineering team", + roles=["WORKER"] + ) + group = await auth_client.create_group_validated("engineering", request) + print(f"Created group: {group.id}") + ``` + """ + result = await self._group_api.upsert_group( + id=group_id, upsert_group_request=upsert_group_request, **kwargs + ) + + result_dict = cast(Dict[str, Any], result) + result_model = GroupAdapter.from_dict(result_dict) + + return result_model + async def update_group( - self, group_id: str, upsert_group_request: UpsertGroupRequest + self, group_id: str, upsert_group_request: UpsertGroupRequestAdapter, **kwargs ) -> Optional[GroupAdapter]: - """Update an existing group""" - result = await self.group_api.upsert_group( - id=group_id, upsert_group_request=upsert_group_request + """Update an existing group and return a validated GroupAdapter. + + Args: + group_id: Unique identifier for the group to update + upsert_group_request: Updated group details + **kwargs: Additional optional parameters to pass to the API + + Returns: + GroupAdapter instance containing the updated group details, or None if update failed + + Example: + ```python + from conductor.asyncio_client.adapters.models.upsert_group_request_adapter import UpsertGroupRequestAdapter + + request = UpsertGroupRequestAdapter( + description="Updated engineering team", + roles=["WORKER", "ADMIN"] + ) + group = await auth_client.update_group("engineering", request) + print(f"Updated group: {group.id}") + ``` + """ + result = await self._group_api.upsert_group( + id=group_id, upsert_group_request=upsert_group_request, **kwargs ) - return GroupAdapter.from_dict(result) - async def get_group(self, group_id: str) -> Optional[GroupAdapter]: - """Get group by ID""" - result = await self.group_api.get_group(id=group_id) - return GroupAdapter.from_dict(result) + result_dict = cast(Dict[str, Any], result) + result_model = GroupAdapter.from_dict(result_dict) + + return result_model + + async def get_group(self, group_id: str, **kwargs) -> Optional[GroupAdapter]: + """Get a group by ID and return a validated GroupAdapter. + + Args: + group_id: Unique identifier for the group to retrieve + **kwargs: Additional optional parameters to pass to the API + + Returns: + GroupAdapter instance containing the group details, or None if not found + + Example: + ```python + group = await auth_client.get_group("engineering") + if group: + print(f"Group: {group.id}, Description: {group.description}") + ``` + """ + result = await self._group_api.get_group(id=group_id, **kwargs) + + result_dict = cast(Dict[str, Any], result) + result_model = GroupAdapter.from_dict(result_dict) + + return result_model + + async def delete_group(self, group_id: str, **kwargs) -> None: + """Delete a group by ID. 
+ + Args: + group_id: Unique identifier for the group to delete + **kwargs: Additional optional parameters to pass to the API - async def delete_group(self, group_id: str) -> None: - """Delete group by ID""" - await self.group_api.delete_group(id=group_id) + Returns: + None - async def list_groups(self) -> List[GroupAdapter]: - """List all groups""" - return await self.group_api.list_groups() + Example: + ```python + await auth_client.delete_group("engineering") + ``` + """ + await self._group_api.delete_group(id=group_id, **kwargs) + + async def list_groups(self, **kwargs) -> List[GroupAdapter]: + """List all groups in the system. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of GroupAdapter instances representing all groups + + Example: + ```python + groups = await auth_client.list_groups() + for group in groups: + print(f"Group: {group.id}, Description: {group.description}") + ``` + """ + return await self._group_api.list_groups(**kwargs) # Group User Management Operations + @deprecated("add_user_to_group is deprecated; use add_user_to_group_validated instead") + @typing_deprecated("add_user_to_group is deprecated; use add_user_to_group_validated instead") async def add_user_to_group(self, group_id: str, user_id: str) -> object: - """Add a user to a group""" - return await self.group_api.add_user_to_group(group_id=group_id, user_id=user_id) + """Add a user to a group. + + .. deprecated:: + Use add_user_to_group_validated instead for type-safe validated responses. + + Args: + group_id: Unique identifier for the group + user_id: Unique identifier for the user to add + + Returns: + Raw response object from the API + + Example: + ```python + await auth_client.add_user_to_group("engineering", "john.doe@example.com") + ``` + """ + return await self._group_api.add_user_to_group(group_id=group_id, user_id=user_id) + + async def add_user_to_group_validated(self, group_id: str, user_id: str, **kwargs) -> None: + """Add a user to a group. + Args: + group_id: Unique identifier for the group + user_id: Unique identifier for the user to add + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + await auth_client.add_user_to_group_validated("engineering", "john.doe@example.com") + ``` + """ + await self._group_api.add_user_to_group(group_id=group_id, user_id=user_id, **kwargs) + + @deprecated( + "remove_user_from_group is deprecated; use remove_user_from_group_validated instead" + ) + @typing_deprecated( + "remove_user_from_group is deprecated; use remove_user_from_group_validated instead" + ) async def remove_user_from_group(self, group_id: str, user_id: str) -> object: - """Remove a user from a group""" - return await self.group_api.remove_user_from_group(group_id=group_id, user_id=user_id) + """Remove a user from a group. + + .. deprecated:: + Use remove_user_from_group_validated instead for type-safe validated responses. + + Args: + group_id: Unique identifier for the group + user_id: Unique identifier for the user to remove + + Returns: + Raw response object from the API + + Example: + ```python + await auth_client.remove_user_from_group("engineering", "john.doe@example.com") + ``` + """ + return await self._group_api.remove_user_from_group(group_id=group_id, user_id=user_id) + + async def remove_user_from_group_validated(self, group_id: str, user_id: str, **kwargs) -> None: + """Remove a user from a group. 
+ Args: + group_id: Unique identifier for the group + user_id: Unique identifier for the user to remove + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + await auth_client.remove_user_from_group_validated("engineering", "john.doe@example.com") + ``` + """ + await self._group_api.remove_user_from_group(group_id=group_id, user_id=user_id, **kwargs) + + @deprecated("add_users_to_group is deprecated; use add_users_to_group_validated instead") + @typing_deprecated("add_users_to_group is deprecated; use add_users_to_group_validated instead") async def add_users_to_group(self, group_id: str, user_ids: List[str]) -> None: - """Add multiple users to a group""" - return await self.group_api.add_users_to_group(group_id=group_id, request_body=user_ids) + """Add multiple users to a group. + + .. deprecated:: + Use add_users_to_group_validated instead for type-safe validated responses. + + Args: + group_id: Unique identifier for the group + user_ids: List of user identifiers to add to the group + + Returns: + None + + Example: + ```python + users = ["john.doe@example.com", "jane.smith@example.com"] + await auth_client.add_users_to_group("engineering", users) + ``` + """ + return await self._group_api.add_users_to_group(group_id=group_id, request_body=user_ids) + + async def add_users_to_group_validated( + self, group_id: str, user_ids: List[str], **kwargs + ) -> None: + """Add multiple users to a group. + + Args: + group_id: Unique identifier for the group + user_ids: List of user identifiers to add to the group + **kwargs: Additional optional parameters to pass to the API - async def remove_users_from_group(self, group_id: str, user_ids: List[str]) -> None: - """Remove multiple users from a group""" - return await self.group_api.remove_users_from_group( - group_id=group_id, request_body=user_ids + Returns: + None + + Example: + ```python + users = ["john.doe@example.com", "jane.smith@example.com"] + await auth_client.add_users_to_group_validated("engineering", users) + ``` + """ + await self._group_api.add_users_to_group(group_id=group_id, request_body=user_ids, **kwargs) + + async def remove_users_from_group(self, group_id: str, user_ids: List[str], **kwargs) -> None: + """Remove multiple users from a group. + + Args: + group_id: Unique identifier for the group + user_ids: List of user identifiers to remove from the group + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + users = ["john.doe@example.com", "jane.smith@example.com"] + await auth_client.remove_users_from_group("engineering", users) + ``` + """ + return await self._group_api.remove_users_from_group( + group_id=group_id, request_body=user_ids, **kwargs ) + @deprecated("get_users_in_group is deprecated; use get_users_in_group_validated instead") + @typing_deprecated("get_users_in_group is deprecated; use get_users_in_group_validated instead") async def get_users_in_group(self, group_id: str) -> object: - """Get all users in a group""" - return await self.group_api.get_users_in_group(id=group_id) + """Get all users in a group. + + .. deprecated:: + Use get_users_in_group_validated instead for type-safe validated responses. 
+ + Args: + group_id: Unique identifier for the group + + Returns: + Raw response object from the API containing list of users + + Example: + ```python + users = await auth_client.get_users_in_group("engineering") + ``` + """ + return await self._group_api.get_users_in_group(id=group_id) + + async def get_users_in_group_validated( + self, group_id: str, **kwargs + ) -> List[Optional[ConductorUserAdapter]]: + """Get all users in a group and return a list of validated ConductorUserAdapters. + + Args: + group_id: Unique identifier for the group + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of ConductorUserAdapter instances representing users in the group + + Example: + ```python + users = await auth_client.get_users_in_group_validated("engineering") + for user in users: + if user: + print(f"User: {user.name}") + ``` + """ + result = await self._group_api.get_users_in_group(id=group_id, **kwargs) + + result_dict = cast(Dict[str, Any], result) + result_model = [ConductorUserAdapter.from_dict(_item) for _item in result_dict] + + return result_model # Permission Operations (Only available operations) - async def grant_permissions(self, authorization_request: AuthorizationRequest) -> object: - """Grant permissions to users or groups""" - return await self.authorization_api.grant_permissions( + @deprecated("grant_permissions is deprecated; use grant_permissions_validated instead") + @typing_deprecated("grant_permissions is deprecated; use grant_permissions_validated instead") + async def grant_permissions(self, authorization_request: AuthorizationRequestAdapter) -> object: + """Grant permissions to users or groups. + + .. deprecated:: + Use grant_permissions_validated instead for type-safe validated responses. + + Args: + authorization_request: Authorization details including subject, target, and access level + + Returns: + Raw response object from the API + + Example: + ```python + from conductor.asyncio_client.adapters.models.authorization_request_adapter import AuthorizationRequestAdapter + from conductor.asyncio_client.adapters.models.subject_ref_adapter import SubjectRefAdapter + from conductor.asyncio_client.adapters.models.target_ref_adapter import TargetRefAdapter + + request = AuthorizationRequestAdapter( + subject=SubjectRefAdapter(type="USER", id="john.doe@example.com"), + target=TargetRefAdapter(type="WORKFLOW_DEF", id="my_workflow"), + access=["READ", "EXECUTE"] + ) + await auth_client.grant_permissions(request) + ``` + """ + return await self._authorization_api.grant_permissions( authorization_request=authorization_request ) - async def remove_permissions(self, authorization_request: AuthorizationRequest) -> object: - """Remove permissions from users or groups""" - return await self.authorization_api.remove_permissions( + async def grant_permissions_validated( + self, authorization_request: AuthorizationRequestAdapter, **kwargs + ) -> None: + """Grant permissions to users or groups. 
+ + Args: + authorization_request: Authorization details including subject, target, and access level + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.authorization_request_adapter import AuthorizationRequestAdapter + from conductor.asyncio_client.adapters.models.subject_ref_adapter import SubjectRefAdapter + from conductor.asyncio_client.adapters.models.target_ref_adapter import TargetRefAdapter + + request = AuthorizationRequestAdapter( + subject=SubjectRefAdapter(type="USER", id="john.doe@example.com"), + target=TargetRefAdapter(type="WORKFLOW_DEF", id="my_workflow"), + access=["READ", "EXECUTE"] + ) + await auth_client.grant_permissions_validated(request) + ``` + """ + await self._authorization_api.grant_permissions( + authorization_request=authorization_request, **kwargs + ) + + @deprecated("remove_permissions is deprecated; use remove_permissions_validated instead") + @typing_deprecated("remove_permissions is deprecated; use remove_permissions_validated instead") + async def remove_permissions( + self, authorization_request: AuthorizationRequestAdapter + ) -> object: + """Remove permissions from users or groups. + + .. deprecated:: + Use remove_permissions_validated instead for type-safe validated responses. + + Args: + authorization_request: Authorization details including subject, target, and access level to remove + + Returns: + Raw response object from the API + + Example: + ```python + from conductor.asyncio_client.adapters.models.authorization_request_adapter import AuthorizationRequestAdapter + from conductor.asyncio_client.adapters.models.subject_ref_adapter import SubjectRefAdapter + from conductor.asyncio_client.adapters.models.target_ref_adapter import TargetRefAdapter + + request = AuthorizationRequestAdapter( + subject=SubjectRefAdapter(type="USER", id="john.doe@example.com"), + target=TargetRefAdapter(type="WORKFLOW_DEF", id="my_workflow"), + access=["EXECUTE"] + ) + await auth_client.remove_permissions(request) + ``` + """ + return await self._authorization_api.remove_permissions( authorization_request=authorization_request ) + async def remove_permissions_validated( + self, authorization_request: AuthorizationRequestAdapter, **kwargs + ) -> None: + """Remove permissions from users or groups. 
+ + Args: + authorization_request: Authorization details including subject, target, and access level to remove + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.authorization_request_adapter import AuthorizationRequestAdapter + from conductor.asyncio_client.adapters.models.subject_ref_adapter import SubjectRefAdapter + from conductor.asyncio_client.adapters.models.target_ref_adapter import TargetRefAdapter + + request = AuthorizationRequestAdapter( + subject=SubjectRefAdapter(type="USER", id="john.doe@example.com"), + target=TargetRefAdapter(type="WORKFLOW_DEF", id="my_workflow"), + access=["EXECUTE"] + ) + await auth_client.remove_permissions_validated(request) + ``` + """ + await self._authorization_api.remove_permissions( + authorization_request=authorization_request, **kwargs + ) + + @deprecated("get_permissions is deprecated; use get_permissions_validated instead") + @typing_deprecated("get_permissions is deprecated; use get_permissions_validated instead") async def get_permissions(self, entity_type: str, entity_id: str) -> object: - """Get permissions for a specific entity (user, group, or application)""" - return await self.authorization_api.get_permissions(type=entity_type, id=entity_id) + """Get permissions for a specific entity (user, group, or application). + + .. deprecated:: + Use get_permissions_validated instead for type-safe validated responses. + + Args: + entity_type: Type of the entity (USER, GROUP, or APPLICATION) + entity_id: Unique identifier for the entity + + Returns: + Raw response object from the API containing permissions + + Example: + ```python + permissions = await auth_client.get_permissions("USER", "john.doe@example.com") + ``` + """ + return await self._authorization_api.get_permissions(type=entity_type, id=entity_id) + + async def get_permissions_validated( + self, target: TargetRefAdapter, **kwargs + ) -> Dict[str, List[SubjectRefAdapter]]: + """Get permissions for a specific entity (user, group, or application). - async def get_group_permissions(self, group_id: str) -> object: - """Get permissions granted to a group""" - return await self.group_api.get_granted_permissions1(group_id=group_id) + Args: + target: Target entity reference containing type and ID + **kwargs: Additional optional parameters to pass to the API + + Returns: + Dictionary mapping access types to lists of SubjectRefAdapter instances + + Example: + ```python + from conductor.asyncio_client.adapters.models.target_ref_adapter import TargetRefAdapter + + target = TargetRefAdapter(type="WORKFLOW_DEF", id="my_workflow") + permissions = await auth_client.get_permissions_validated(target) + for access_type, subjects in permissions.items(): + print(f"Access {access_type}: {[s.id for s in subjects]}") + ``` + """ + result = await self._authorization_api.get_permissions( + type=target.type, id=target.id, **kwargs + ) + + permissions = {} + for access_type, subjects in result.items(): + subject_list = [SubjectRefAdapter(id=sub["id"], type=sub["type"]) for sub in subjects] + permissions[access_type] = subject_list + + return permissions + + async def get_group_permissions(self, group_id: str, **kwargs) -> GrantedAccessResponseAdapter: + """Get permissions granted to a group. 
+ + Args: + group_id: Unique identifier for the group + **kwargs: Additional optional parameters to pass to the API + + Returns: + GrantedAccessResponseAdapter containing all permissions granted to the group + + Example: + ```python + permissions = await auth_client.get_group_permissions("engineering") + if permissions.granted_access: + for access in permissions.granted_access: + print(f"Access: {access.access} on {access.target.type}:{access.target.id}") + ``` + """ + return await self._group_api.get_granted_permissions1(group_id=group_id, **kwargs) # Convenience Methods async def upsert_user( - self, user_id: str, upsert_user_request: UpsertUserRequest + self, user_id: str, upsert_user_request: UpsertUserRequestAdapter, **kwargs ) -> Optional[ConductorUserAdapter]: - """Alias for create_user/update_user""" - result = await self.create_user(user_id, upsert_user_request) - return ConductorUserAdapter.from_dict(result) + """Create or update a user (upsert operation). + + This is an alias for create_user_validated/update_user_validated. + + Args: + user_id: Unique identifier for the user + upsert_user_request: User details to create or update + **kwargs: Additional optional parameters to pass to the API + + Returns: + ConductorUserAdapter instance containing the user details, or None if operation failed + + Example: + ```python + from conductor.asyncio_client.adapters.models.upsert_user_request_adapter import UpsertUserRequestAdapter + + request = UpsertUserRequestAdapter(name="John Doe", roles=["USER"]) + user = await auth_client.upsert_user("john.doe@example.com", request) + ``` + """ + result = await self.create_user_validated(user_id, upsert_user_request, **kwargs) + return result async def upsert_group( - self, group_id: str, upsert_group_request: UpsertGroupRequest + self, group_id: str, upsert_group_request: UpsertGroupRequestAdapter, **kwargs ) -> Optional[GroupAdapter]: - """Alias for create_group/update_group""" - result = await self.create_group(group_id, upsert_group_request) - return GroupAdapter.from_dict(result) + """Create or update a group (upsert operation). + + This is an alias for create_group_validated/update_group. + + Args: + group_id: Unique identifier for the group + upsert_group_request: Group details to create or update + **kwargs: Additional optional parameters to pass to the API + + Returns: + GroupAdapter instance containing the group details, or None if operation failed + + Example: + ```python + from conductor.asyncio_client.adapters.models.upsert_group_request_adapter import UpsertGroupRequestAdapter + + request = UpsertGroupRequestAdapter(description="Engineering team", roles=["WORKER"]) + group = await auth_client.upsert_group("engineering", request) + ``` + """ + result = await self.create_group_validated(group_id, upsert_group_request, **kwargs) + return result + + async def set_application_tags( + self, tags: List[TagAdapter], application_id: str, **kwargs + ) -> None: + """Set tags for an application. 
+ + Args: + tags: List of TagAdapter instances to set on the application + application_id: Unique identifier for the application + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter + + tags = [TagAdapter(type="METADATA", key="environment", value="production")] + await auth_client.set_application_tags(tags, "app-123") + ``` + """ + await self._application_api.put_tag_for_application(id=application_id, tag=tags, **kwargs) + + async def get_application_tags(self, application_id: str, **kwargs) -> List[TagAdapter]: + """Get tags for an application. + + Args: + application_id: Unique identifier for the application + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of TagAdapter instances representing tags on the application + + Example: + ```python + tags = await auth_client.get_application_tags("app-123") + for tag in tags: + print(f"Tag: {tag.key}={tag.value}") + ``` + """ + return await self._application_api.get_tags_for_application(id=application_id, **kwargs) + + async def delete_application_tags( + self, tags: List[TagAdapter], application_id: str, **kwargs + ) -> None: + """Delete tags from an application. + + Args: + tags: List of TagAdapter instances to delete from the application + application_id: Unique identifier for the application + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter + + tags = [TagAdapter(type="METADATA", key="environment", value="production")] + await auth_client.delete_application_tags(tags, "app-123") + ``` + """ + await self._application_api.delete_tag_for_application( + id=application_id, tag=tags, **kwargs + ) + + @deprecated("create_access_key is deprecated; use create_access_key_validated instead") + @typing_deprecated("create_access_key is deprecated; use create_access_key_validated instead") + async def create_access_key(self, application_id: str) -> object: + """Create an access key for an application. - async def set_application_tags(self, tags: List[TagAdapter], application_id: str): - await self.application_api.put_tag_for_application(application_id, tags) + .. deprecated:: + Use create_access_key_validated instead for type-safe validated responses. - async def get_application_tags(self, application_id: str) -> List[TagAdapter]: - return await self.application_api.get_tags_for_application(application_id) + Args: + application_id: Unique identifier for the application - async def delete_application_tags(self, tags: List[TagAdapter], application_id: str): - await self.application_api.delete_tag_for_application(application_id, tags) + Returns: + Raw response object from the API containing the created access key - async def create_access_key(self, application_id: str) -> AccessKey: - key_obj = await self.application_api.create_access_key(application_id) + Example: + ```python + key = await auth_client.create_access_key("app-123") + ``` + """ + key_obj = await self._application_api.create_access_key(application_id) return key_obj - async def get_access_keys(self, application_id: str) -> List[AccessKey]: - access_keys_obj = await self.application_api.get_access_keys(application_id) - return list(access_keys_obj) + async def create_access_key_validated( + self, application_id: str, **kwargs + ) -> CreatedAccessKeyAdapter: + """Create an access key for an application. 
+ + Args: + application_id: Unique identifier for the application + **kwargs: Additional optional parameters to pass to the API + + Returns: + CreatedAccessKeyAdapter instance containing the access key details including the secret + + Example: + ```python + key = await auth_client.create_access_key_validated("app-123") + print(f"Key ID: {key.id}, Secret: {key.secret}") + # Note: The secret is only returned once during creation + ``` + """ + result = await self._application_api.create_access_key(id=application_id, **kwargs) + + result_dict = cast(Dict[str, Any], result) + result_model = CreatedAccessKeyAdapter.from_dict(result_dict) + + return result_model + + @deprecated("get_access_keys is deprecated; use get_access_keys_validated instead") + @typing_deprecated("get_access_keys is deprecated; use get_access_keys_validated instead") + async def get_access_keys(self, application_id: str) -> object: + """Get access keys for an application. + + .. deprecated:: + Use get_access_keys_validated instead for type-safe validated responses. + + Args: + application_id: Unique identifier for the application + + Returns: + Raw response object from the API containing list of access keys + + Example: + ```python + keys = await auth_client.get_access_keys("app-123") + ``` + """ + access_keys_obj = await self._application_api.get_access_keys(application_id) + return access_keys_obj + + async def get_access_keys_validated( + self, application_id: str, **kwargs + ) -> List[AccessKeyAdapter]: + """Get access keys for an application. + + Args: + application_id: Unique identifier for the application + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of AccessKeyAdapter instances representing all access keys for the application + + Example: + ```python + keys = await auth_client.get_access_keys_validated("app-123") + for key in keys: + print(f"Key ID: {key.id}, Active: {key.enabled}") + ``` + """ + result = await self._application_api.get_access_keys(application_id, **kwargs) + + result_dict = cast(Dict[str, Any], result) + result_model = [AccessKeyAdapter.from_dict(_item) for _item in result_dict] - async def toggle_access_key_status(self, application_id: str, key_id: str) -> AccessKey: - key_obj = await self.application_api.toggle_access_key_status(application_id, key_id) + return result_model + + @deprecated( + "toggle_access_key_status is deprecated; use toggle_access_key_status_validated instead" + ) + @typing_deprecated( + "toggle_access_key_status is deprecated; use toggle_access_key_status_validated instead" + ) + async def toggle_access_key_status(self, application_id: str, key_id: str) -> object: + """Toggle the status (active/inactive) of an access key. + + .. deprecated:: + Use toggle_access_key_status_validated instead for type-safe validated responses. + + Args: + application_id: Unique identifier for the application + key_id: Unique identifier for the access key + + Returns: + Raw response object from the API + + Example: + ```python + await auth_client.toggle_access_key_status("app-123", "key-456") + ``` + """ + key_obj = await self._application_api.toggle_access_key_status(application_id, key_id) return key_obj - async def delete_access_key(self, application_id: str, key_id: str): - await self.application_api.delete_access_key(application_id, key_id) + async def toggle_access_key_status_validated( + self, application_id: str, key_id: str, **kwargs + ) -> AccessKeyAdapter: + """Toggle the status (active/inactive) of an access key. 
+ + Args: + application_id: Unique identifier for the application + key_id: Unique identifier for the access key + **kwargs: Additional optional parameters to pass to the API + + Returns: + AccessKeyAdapter instance with the updated status + + Example: + ```python + key = await auth_client.toggle_access_key_status_validated("app-123", "key-456") + print(f"Key is now: {'enabled' if key.enabled else 'disabled'}") + ``` + """ + result = await self._application_api.toggle_access_key_status( + application_id=application_id, key_id=key_id, **kwargs + ) + + result_dict = cast(Dict[str, Any], result) + result_model = AccessKeyAdapter.from_dict(result_dict) + + return result_model + + async def delete_access_key(self, application_id: str, key_id: str, **kwargs) -> None: + """Delete an access key from an application. + + Args: + application_id: Unique identifier for the application + key_id: Unique identifier for the access key to delete + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + await auth_client.delete_access_key("app-123", "key-456") + ``` + """ + await self._application_api.delete_access_key( + application_id=application_id, key_id=key_id, **kwargs + ) + + async def add_role_to_application_user(self, application_id: str, role: str, **kwargs) -> None: + """Add a role to an application user. - async def add_role_to_application_user(self, application_id: str, role: str): - await self.application_api.add_role_to_application_user(application_id, role) + Args: + application_id: Unique identifier for the application + role: Role name to add (e.g., "WORKER", "ADMIN") + **kwargs: Additional optional parameters to pass to the API - async def remove_role_from_application_user(self, application_id: str, role: str): - await self.application_api.remove_role_from_application_user(application_id, role) + Returns: + None - async def get_granted_permissions_for_group(self, group_id: str) -> List[GrantedAccess]: - granted_access_obj = await self.group_api.get_granted_permissions1(group_id) + Example: + ```python + await auth_client.add_role_to_application_user("app-123", "WORKER") + ``` + """ + await self._application_api.add_role_to_application_user( + application_id=application_id, role=role, **kwargs + ) + + async def remove_role_from_application_user( + self, application_id: str, role: str, **kwargs + ) -> None: + """Remove a role from an application user. + + Args: + application_id: Unique identifier for the application + role: Role name to remove (e.g., "WORKER", "ADMIN") + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + await auth_client.remove_role_from_application_user("app-123", "WORKER") + ``` + """ + await self._application_api.remove_role_from_application_user( + application_id=application_id, role=role, **kwargs + ) + + async def get_granted_permissions_for_group( + self, group_id: str, **kwargs + ) -> List[GrantedAccessAdapter]: + """Get granted permissions for a group in a simplified format. 
+ + Args: + group_id: Unique identifier for the group + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of GrantedAccessAdapter instances containing target and access information + + Example: + ```python + permissions = await auth_client.get_granted_permissions_for_group("engineering") + for perm in permissions: + print(f"Access {perm.access} on {perm.target.type}:{perm.target.id}") + ``` + """ + granted_access_obj = await self.get_group_permissions(group_id=group_id, **kwargs) if not granted_access_obj.granted_access: return [] @@ -229,25 +1334,105 @@ async def get_granted_permissions_for_group(self, group_id: str) -> List[Granted if not ga.target: continue - target = TargetRef(type=ga.target.type, id=ga.target.id) + target = TargetRefAdapter(type=ga.target.type, id=ga.target.id) access = ga.access - granted_permissions.append(GrantedAccess(target=target, access=access)) + granted_permissions.append(GrantedAccessAdapter(target=target, access=access)) return granted_permissions - async def get_granted_permissions_for_user(self, user_id: str) -> List[GrantedAccess]: - granted_access_obj = cast( - Dict[str, Any], await self.user_api.get_granted_permissions(user_id) - ) + async def get_granted_permissions_for_user( + self, user_id: str, **kwargs + ) -> List[GrantedAccessAdapter]: + """Get granted permissions for a user in a simplified format. + + Args: + user_id: Unique identifier for the user + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of GrantedAccessAdapter instances containing target and access information + + Example: + ```python + permissions = await auth_client.get_granted_permissions_for_user("john.doe@example.com") + for perm in permissions: + print(f"User has {perm.access} access to {perm.target.type}:{perm.target.id}") + ``` + """ + granted_access_obj = await self.get_user_permissions(user_id=user_id, **kwargs) + + if granted_access_obj is None or granted_access_obj.granted_access is None: + return [] + granted_permissions = [] - for ga in granted_access_obj["grantedAccess"]: - target = TargetRef(type=ga["target"]["type"], id=ga["target"]["id"]) - access = ga["access"] - granted_permissions.append(GrantedAccess(target=target, access=access)) + for ga in granted_access_obj.granted_access: + if ga.target is None: + continue + + target = TargetRefAdapter(type=ga.target.type, id=ga.target.id) + access = ga.access + granted_permissions.append(GrantedAccessAdapter(target=target, access=access)) + return granted_permissions async def get_app_by_access_key_id( - self, access_key_id: str, *args, **kwargs + self, access_key_id: str, **kwargs ) -> Optional[ExtendedConductorApplicationAdapter]: - result = await self.application_api.get_app_by_access_key_id(access_key_id, *args, **kwargs) - return ExtendedConductorApplicationAdapter.from_dict(result) + """Get an application by its access key ID. 
+ + Args: + access_key_id: Unique identifier for the access key + **kwargs: Additional optional parameters to pass to the API + + Returns: + ExtendedConductorApplicationAdapter instance for the application, or None if not found + + Example: + ```python + app = await auth_client.get_app_by_access_key_id("key-123") + if app: + print(f"Application: {app.name}, Owner: {app.owner}") + ``` + """ + result = await self._application_api.get_app_by_access_key_id( + access_key_id=access_key_id, **kwargs + ) + + result_dict = cast(Dict[str, Any], result) + result_model = ExtendedConductorApplicationAdapter.from_dict(result_dict) + + return result_model + + async def check_permissions( + self, user_id: str, type: str, id: str, **kwargs + ) -> Dict[str, bool]: + """Check what permissions a user has on a specific resource. + + Args: + user_id: Unique identifier for the user + type: Type of resource (e.g., "WORKFLOW_DEF", "TASK_DEF") + id: Unique identifier for the resource + **kwargs: Additional optional parameters to pass to the API + + Returns: + Dictionary mapping permission types (READ, EXECUTE, UPDATE, DELETE) to boolean values + + Example: + ```python + permissions = await auth_client.check_permissions( + "john.doe@example.com", + "WORKFLOW_DEF", + "my_workflow" + ) + if permissions.get("EXECUTE"): + print("User can execute this workflow") + if permissions.get("UPDATE"): + print("User can update this workflow") + ``` + """ + result = await self._user_api.check_permissions(user_id=user_id, type=type, id=id, **kwargs) + + result_dict = cast(Dict[str, Any], result) + result_model = {k: v for k, v in result_dict.items() if isinstance(v, bool)} + + return result_model diff --git a/src/conductor/asyncio_client/orkes/orkes_base_client.py b/src/conductor/asyncio_client/orkes/orkes_base_client.py index fd3891785..d598aaf47 100644 --- a/src/conductor/asyncio_client/orkes/orkes_base_client.py +++ b/src/conductor/asyncio_client/orkes/orkes_base_client.py @@ -1,4 +1,8 @@ import logging +import warnings + +from deprecated import deprecated +from typing_extensions import deprecated as typing_deprecated from conductor.asyncio_client.adapters import ApiClient from conductor.asyncio_client.adapters.api.application_resource_api import ( @@ -7,6 +11,7 @@ from conductor.asyncio_client.adapters.api.authorization_resource_api import ( AuthorizationResourceApiAdapter, ) +from conductor.asyncio_client.adapters.api.event_resource_api import EventResourceApiAdapter from conductor.asyncio_client.adapters.api.group_resource_api import GroupResourceApiAdapter from conductor.asyncio_client.adapters.api.integration_resource_api import ( IntegrationResourceApiAdapter, @@ -20,10 +25,6 @@ from conductor.asyncio_client.adapters.api.task_resource_api import TaskResourceApiAdapter from conductor.asyncio_client.adapters.api.user_resource_api import UserResourceApiAdapter from conductor.asyncio_client.adapters.api.workflow_resource_api import WorkflowResourceApiAdapter -from conductor.asyncio_client.adapters.api.event_resource_api import EventResourceApiAdapter -from conductor.asyncio_client.adapters.api.event_execution_resource_api import ( - EventExecutionResourceApiAdapter, -) from conductor.asyncio_client.configuration.configuration import Configuration @@ -52,18 +53,325 @@ def __init__(self, configuration: Configuration, api_client: ApiClient): self.logger = logging.getLogger(__name__) # Initialize all API clients - self.metadata_api = MetadataResourceApiAdapter(self.api_client) - self.task_api = 
TaskResourceApiAdapter(self.api_client) - self.workflow_api = WorkflowResourceApiAdapter(self.api_client) - self.application_api = ApplicationResourceApiAdapter(self.api_client) - self.secret_api = SecretResourceApiAdapter(self.api_client) - self.user_api = UserResourceApiAdapter(self.api_client) - self.group_api = GroupResourceApiAdapter(self.api_client) - self.authorization_api = AuthorizationResourceApiAdapter(self.api_client) - self.scheduler_api = SchedulerResourceApiAdapter(self.api_client) - self.tags_api = TagsApiAdapter(self.api_client) - self.integration_api = IntegrationResourceApiAdapter(self.api_client) - self.prompt_api = PromptResourceApiAdapter(self.api_client) - self.schema_api = SchemaResourceApiAdapter(self.api_client) - self.event_api = EventResourceApiAdapter(self.api_client) - self.event_execution_api = EventExecutionResourceApiAdapter(self.api_client) + self._metadata_api = MetadataResourceApiAdapter(self.api_client) + self._task_api = TaskResourceApiAdapter(self.api_client) + self._workflow_api = WorkflowResourceApiAdapter(self.api_client) + self._application_api = ApplicationResourceApiAdapter(self.api_client) + self._secret_api = SecretResourceApiAdapter(self.api_client) + self._user_api = UserResourceApiAdapter(self.api_client) + self._group_api = GroupResourceApiAdapter(self.api_client) + self._authorization_api = AuthorizationResourceApiAdapter(self.api_client) + self._scheduler_api = SchedulerResourceApiAdapter(self.api_client) + self._tags_api = TagsApiAdapter(self.api_client) + self._integration_api = IntegrationResourceApiAdapter(self.api_client) + self._prompt_api = PromptResourceApiAdapter(self.api_client) + self._schema_api = SchemaResourceApiAdapter(self.api_client) + self._event_api = EventResourceApiAdapter(self.api_client) + + @property + @typing_deprecated( + "metadata_api is deprecated; use OrkesMetadataClient instead. " + "This attribute will be removed in a future version." + ) + @deprecated( + "metadata_api is deprecated; use OrkesMetadataClient instead. " + "This attribute will be removed in a future version." + ) + def metadata_api(self) -> MetadataResourceApiAdapter: + """ + Deprecated: attribute-style access maintained for backward compatibility. + Prefer using `OrkesMetadataClient` methods instead. + """ + warnings.warn( + "'metadata_api' is deprecated and will be removed in a future release. " + "Use `OrkesMetadataClient` instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._metadata_api + + @property + @typing_deprecated( + "task_api is deprecated; use OrkesTaskClient instead. " + "This attribute will be removed in a future version." + ) + @deprecated( + "task_api is deprecated; use OrkesTaskClient instead. " + "This attribute will be removed in a future version." + ) + def task_api(self) -> TaskResourceApiAdapter: + """ + Deprecated: attribute-style access maintained for backward compatibility. + Prefer using `OrkesTaskClient` methods instead. + """ + warnings.warn( + "'task_api' is deprecated and will be removed in a future release. " + "Use `OrkesTaskClient` instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._task_api + + @property + @typing_deprecated( + "workflow_api is deprecated; use OrkesWorkflowClient instead. " + "This attribute will be removed in a future version." + ) + @deprecated( + "workflow_api is deprecated; use OrkesWorkflowClient instead. " + "This attribute will be removed in a future version." 
+ ) + def workflow_api(self) -> WorkflowResourceApiAdapter: + """ + Deprecated: attribute-style access maintained for backward compatibility. + Prefer using `OrkesWorkflowClient` methods instead. + """ + warnings.warn( + "'workflow_api' is deprecated and will be removed in a future release. " + "Use `OrkesWorkflowClient` instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._workflow_api + + @property + @typing_deprecated( + "application_api is deprecated; use OrkesAuthorizationClient instead. " + "This attribute will be removed in a future version." + ) + @deprecated( + "application_api is deprecated; use OrkesAuthorizationClient instead. " + "This attribute will be removed in a future version." + ) + def application_api(self) -> ApplicationResourceApiAdapter: + """ + Deprecated: attribute-style access maintained for backward compatibility. + Prefer using `OrkesAuthorizationClient` methods instead. + """ + warnings.warn( + "'application_api' is deprecated and will be removed in a future release. " + "Use `OrkesAuthorizationClient` instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._application_api + + @property + @typing_deprecated( + "secret_api is deprecated; use OrkesSecretClient instead. " + "This attribute will be removed in a future version." + ) + @deprecated( + "secret_api is deprecated; use OrkesSecretClient instead. " + "This attribute will be removed in a future version." + ) + def secret_api(self) -> SecretResourceApiAdapter: + """ + Deprecated: attribute-style access maintained for backward compatibility. + Prefer using `OrkesSecretClient` methods instead. + """ + warnings.warn( + "'secret_api' is deprecated and will be removed in a future release. " + "Use `OrkesSecretClient` instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._secret_api + + @property + @typing_deprecated( + "user_api is deprecated; use OrkesAuthorizationClient instead. " + "This attribute will be removed in a future version." + ) + @deprecated( + "user_api is deprecated; use OrkesAuthorizationClient instead. " + "This attribute will be removed in a future version." + ) + def user_api(self) -> UserResourceApiAdapter: + """ + Deprecated: attribute-style access maintained for backward compatibility. + Prefer using `OrkesAuthorizationClient` methods instead. + """ + warnings.warn( + "'user_api' is deprecated and will be removed in a future release. " + "Use `OrkesAuthorizationClient` instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._user_api + + @property + @typing_deprecated( + "group_api is deprecated; use OrkesAuthorizationClient instead. " + "This attribute will be removed in a future version." + ) + @deprecated( + "group_api is deprecated; use OrkesAuthorizationClient instead. " + "This attribute will be removed in a future version." + ) + def group_api(self) -> GroupResourceApiAdapter: + """ + Deprecated: attribute-style access maintained for backward compatibility. + Prefer using `OrkesAuthorizationClient` methods instead. + """ + warnings.warn( + "'group_api' is deprecated and will be removed in a future release. " + "Use `OrkesAuthorizationClient` instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._group_api + + @property + @typing_deprecated( + "authorization_api is deprecated; use OrkesAuthorizationClient instead. " + "This attribute will be removed in a future version." + ) + @deprecated( + "authorization_api is deprecated; use OrkesAuthorizationClient instead. " + "This attribute will be removed in a future version." 
+ ) + def authorization_api(self) -> AuthorizationResourceApiAdapter: + """ + Deprecated: attribute-style access maintained for backward compatibility. + Prefer using `OrkesAuthorizationClient` methods instead. + """ + warnings.warn( + "'authorization_api' is deprecated and will be removed in a future release. " + "Use `OrkesAuthorizationClient` instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._authorization_api + + @property + @typing_deprecated( + "scheduler_api is deprecated; use OrkesSchedulerClient instead. " + "This attribute will be removed in a future version." + ) + @deprecated( + "scheduler_api is deprecated; use OrkesSchedulerClient instead. " + "This attribute will be removed in a future version." + ) + def scheduler_api(self) -> SchedulerResourceApiAdapter: + """ + Deprecated: attribute-style access maintained for backward compatibility. + Prefer using `OrkesSchedulerClient` methods instead. + """ + warnings.warn( + "'scheduler_api' is deprecated and will be removed in a future release. " + "Use `OrkesSchedulerClient` instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._scheduler_api + + @property + @typing_deprecated( + "tags_api is deprecated; use OrkesTagsClient instead. " + "This attribute will be removed in a future version." + ) + @deprecated( + "tags_api is deprecated; use OrkesTagsClient instead. " + "This attribute will be removed in a future version." + ) + def tags_api(self) -> TagsApiAdapter: + """ + Deprecated: attribute-style access maintained for backward compatibility. + Prefer using `OrkesTagsClient` methods instead. + """ + warnings.warn( + "'tags_api' is deprecated and will be removed in a future release. " + "Use `OrkesTagsClient` instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._tags_api + + @property + @typing_deprecated( + "integration_api is deprecated; use OrkesIntegrationClient instead. " + "This attribute will be removed in a future version." + ) + @deprecated( + "integration_api is deprecated; use OrkesIntegrationClient instead. " + "This attribute will be removed in a future version." + ) + def integration_api(self) -> IntegrationResourceApiAdapter: + """ + Deprecated: attribute-style access maintained for backward compatibility. + Prefer using `OrkesIntegrationClient` methods instead. + """ + warnings.warn( + "'integration_api' is deprecated and will be removed in a future release. " + "Use `OrkesIntegrationClient` instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._integration_api + + @property + @typing_deprecated( + "prompt_api is deprecated; use OrkesPromptClient instead. " + "This attribute will be removed in a future version." + ) + @deprecated( + "prompt_api is deprecated; use OrkesPromptClient instead. " + "This attribute will be removed in a future version." + ) + def prompt_api(self) -> PromptResourceApiAdapter: + """ + Deprecated: attribute-style access maintained for backward compatibility. + Prefer using `OrkesPromptClient` methods instead. + """ + warnings.warn( + "'prompt_api' is deprecated and will be removed in a future release. " + "Use `OrkesPromptClient` instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._prompt_api + + @property + @typing_deprecated( + "schema_api is deprecated; use OrkesSchemaClient instead. " + "This attribute will be removed in a future version." + ) + @deprecated( + "schema_api is deprecated; use OrkesSchemaClient instead. " + "This attribute will be removed in a future version." 
+ ) + def schema_api(self) -> SchemaResourceApiAdapter: + """ + Deprecated: attribute-style access maintained for backward compatibility. + Prefer using `OrkesSchemaClient` methods instead. + """ + warnings.warn( + "'schema_api' is deprecated and will be removed in a future release. " + "Use `OrkesSchemaClient` instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._schema_api + + @property + @typing_deprecated( + "event_api is deprecated; use OrkesEventClient instead. " + "This attribute will be removed in a future version." + ) + @deprecated( + "event_api is deprecated; use OrkesEventClient instead. " + "This attribute will be removed in a future version." + ) + def event_api(self) -> EventResourceApiAdapter: + """ + Deprecated: attribute-style access maintained for backward compatibility. + Prefer using `OrkesEventClient` methods instead. + """ + warnings.warn( + "'event_api' is deprecated and will be removed in a future release. " + "Use `OrkesEventClient` instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._event_api diff --git a/src/conductor/asyncio_client/orkes/orkes_clients.py b/src/conductor/asyncio_client/orkes/orkes_clients.py index fd2a1049d..1c064b029 100644 --- a/src/conductor/asyncio_client/orkes/orkes_clients.py +++ b/src/conductor/asyncio_client/orkes/orkes_clients.py @@ -5,6 +5,7 @@ from conductor.asyncio_client.adapters import ApiClient from conductor.asyncio_client.configuration.configuration import Configuration from conductor.asyncio_client.orkes.orkes_authorization_client import OrkesAuthorizationClient +from conductor.asyncio_client.orkes.orkes_event_client import OrkesEventClient from conductor.asyncio_client.orkes.orkes_integration_client import OrkesIntegrationClient from conductor.asyncio_client.orkes.orkes_metadata_client import OrkesMetadataClient from conductor.asyncio_client.orkes.orkes_prompt_client import OrkesPromptClient @@ -13,7 +14,6 @@ from conductor.asyncio_client.orkes.orkes_secret_client import OrkesSecretClient from conductor.asyncio_client.orkes.orkes_task_client import OrkesTaskClient from conductor.asyncio_client.orkes.orkes_workflow_client import OrkesWorkflowClient -from conductor.asyncio_client.orkes.orkes_event_client import OrkesEventClient from conductor.asyncio_client.workflow.executor.workflow_executor import AsyncWorkflowExecutor diff --git a/src/conductor/asyncio_client/orkes/orkes_event_client.py b/src/conductor/asyncio_client/orkes/orkes_event_client.py index 99549e21d..f6534fda9 100644 --- a/src/conductor/asyncio_client/orkes/orkes_event_client.py +++ b/src/conductor/asyncio_client/orkes/orkes_event_client.py @@ -1,314 +1,462 @@ from __future__ import annotations -from typing import List - +from typing import Any, Dict, List + +from conductor.asyncio_client.adapters.models.connectivity_test_input_adapter import ( + ConnectivityTestInputAdapter, +) +from conductor.asyncio_client.adapters.models.connectivity_test_result_adapter import ( + ConnectivityTestResultAdapter, +) from conductor.asyncio_client.adapters.models.event_handler_adapter import EventHandlerAdapter from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter from conductor.asyncio_client.orkes.orkes_base_client import OrkesBaseClient class OrkesEventClient(OrkesBaseClient): - """Event management client for Orkes Conductor platform. - - Provides comprehensive event handling capabilities including event handler - management, tag operations, queue configuration, and event execution monitoring. 
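The deprecated `*_api` properties above keep attribute-style access working while emitting a runtime `DeprecationWarning` (plus a static marker via `typing_extensions.deprecated`). A minimal migration sketch, assuming `OrkesWorkflowClient` reuses the `OrkesBaseClient` constructor shown in this diff and using a placeholder server URL:

```python
import warnings

from conductor.asyncio_client.adapters import ApiClient
from conductor.asyncio_client.configuration.configuration import Configuration
from conductor.asyncio_client.orkes.orkes_workflow_client import OrkesWorkflowClient

config = Configuration(server_api_url="http://localhost:8080/api")  # placeholder server URL
api_client = ApiClient(configuration=config)

# New style: construct the dedicated client and call its methods directly.
workflow_client = OrkesWorkflowClient(config, api_client)

# Old style: reading the workflow_api attribute still resolves, but the property
# defined in OrkesBaseClient now warns and points callers at the dedicated client.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    _ = workflow_client.workflow_api
assert any(issubclass(w.category, DeprecationWarning) for w in caught)
```

The same migration applies to the other deprecated properties (`metadata_api`, `task_api`, `application_api`, and so on).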
- """ - # Event Handler Operations - async def create_event_handler(self, event_handler: List[EventHandlerAdapter]) -> None: + async def create_event_handler( + self, event_handler: List[EventHandlerAdapter], **kwargs + ) -> None: """Create a new event handler. Creates one or more event handlers that will be triggered by specific events. Event handlers define what actions to take when certain events occur in the system. - Parameters: - ----------- - event_handler : List[EventHandlerAdapter] - List of event handler configurations to create + Args: + event_handler: List of event handler configurations to create + **kwargs: Additional optional parameters to pass to the API - Example: - -------- - ```python - from conductor.asyncio_client.adapters.models.event_handler_adapter import EventHandlerAdapter - from conductor.asyncio_client.adapters.models.action_adapter import ActionAdapter - - # Create an event handler - event_handler = EventHandlerAdapter( - name="workflow_trigger", - event="workflow.completed", - active=True, - condition="payload.status == 'COMPLETED'", - actions=[ - ActionAdapter( - action="start_workflow", - workflow_id="notification_workflow", - input_parameters={"message": "Workflow completed successfully"} - ) - ] - ) + Returns: + None - await event_client.create_event_handler([event_handler]) - ``` + Example: + ```python + from conductor.asyncio_client.adapters.models.event_handler_adapter import EventHandlerAdapter + from conductor.asyncio_client.adapters.models.action_adapter import ActionAdapter + + # Create an event handler + event_handler = EventHandlerAdapter( + name="workflow_trigger", + event="workflow.completed", + active=True, + condition="payload.status == 'COMPLETED'", + actions=[ + ActionAdapter( + action="start_workflow", + workflow_id="notification_workflow", + input_parameters={"message": "Workflow completed successfully"} + ) + ] + ) + + await event_client.create_event_handler([event_handler]) + ``` """ - return await self.event_api.add_event_handler(event_handler) + return await self._event_api.add_event_handler(event_handler=event_handler, **kwargs) - async def get_event_handler(self, name: str) -> EventHandlerAdapter: + async def get_event_handler(self, name: str, **kwargs) -> EventHandlerAdapter: """Get event handler by name. Retrieves a specific event handler configuration by its name. - Parameters: - ----------- - name : str - The name of the event handler to retrieve + Args: + name: The name of the event handler to retrieve + **kwargs: Additional optional parameters to pass to the API Returns: - -------- - EventHandlerAdapter - The event handler configuration + EventHandlerAdapter instance containing the event handler configuration Example: - -------- - ```python - # Get a specific event handler - handler = await event_client.get_event_handler("workflow_trigger") - print(f"Handler event: {handler.event}") - print(f"Handler active: {handler.active}") - ``` + ```python + # Get a specific event handler + handler = await event_client.get_event_handler("workflow_trigger") + print(f"Handler event: {handler.event}") + print(f"Handler active: {handler.active}") + ``` """ - return await self.event_api.get_event_handler_by_name(name=name) + return await self._event_api.get_event_handler_by_name(name=name, **kwargs) - async def list_event_handlers(self) -> List[EventHandlerAdapter]: + async def list_event_handlers(self, **kwargs) -> List[EventHandlerAdapter]: """List all event handlers. Retrieves all event handlers configured in the system. 
+ Args: + **kwargs: Additional optional parameters to pass to the API + Returns: - -------- - List[EventHandlerAdapter] - List of all event handler configurations + List of EventHandlerAdapter instances representing all event handler configurations Example: - -------- - ```python - # List all event handlers - handlers = await event_client.list_event_handlers() - for handler in handlers: - print(f"Handler: {handler.name}, Event: {handler.event}, Active: {handler.active}") - ``` + ```python + # List all event handlers + handlers = await event_client.list_event_handlers() + for handler in handlers: + print(f"Handler: {handler.name}, Event: {handler.event}, Active: {handler.active}") + ``` """ - return await self.event_api.get_event_handlers() + return await self._event_api.get_event_handlers(**kwargs) - async def list_event_handlers_for_event(self, event: str) -> List[EventHandlerAdapter]: + async def list_event_handlers_for_event( + self, event: str, **kwargs + ) -> List[EventHandlerAdapter]: """List event handlers for a specific event. Retrieves all event handlers that are configured to respond to a specific event type. - Parameters: - ----------- - event : str - The event type to filter handlers by (e.g., "workflow.completed", "task.failed") + Args: + event: The event type to filter handlers by (e.g., "workflow.completed", "task.failed") + **kwargs: Additional optional parameters to pass to the API Returns: - -------- - List[EventHandlerAdapter] - List of event handlers that respond to the specified event + List of EventHandlerAdapter instances that respond to the specified event Example: - -------- - ```python - # Get handlers for workflow completion events - handlers = await event_client.list_event_handlers_for_event("workflow.completed") - print(f"Found {len(handlers)} handlers for workflow.completed events") - - # Get handlers for task failure events - failure_handlers = await event_client.list_event_handlers_for_event("task.failed") - ``` + ```python + # Get handlers for workflow completion events + handlers = await event_client.list_event_handlers_for_event("workflow.completed") + print(f"Found {len(handlers)} handlers for workflow.completed events") + + # Get handlers for task failure events + failure_handlers = await event_client.list_event_handlers_for_event("task.failed") + ``` """ - return await self.event_api.get_event_handlers_for_event(event=event) + return await self._event_api.get_event_handlers_for_event(event=event, **kwargs) - async def update_event_handler(self, event_handler: EventHandlerAdapter) -> None: + async def update_event_handler(self, event_handler: EventHandlerAdapter, **kwargs) -> None: """Update an existing event handler. Updates the configuration of an existing event handler. The handler is identified by its name field. 
- Parameters: - ----------- - event_handler : EventHandlerAdapter - Event handler configuration to update + Args: + event_handler: Event handler configuration to update + **kwargs: Additional optional parameters to pass to the API + + Returns: + None Example: - -------- - ```python - # Update an existing event handler - handler = await event_client.get_event_handler("workflow_trigger") - handler.active = False # Disable the handler - handler.condition = "payload.status == 'COMPLETED' AND payload.priority == 'HIGH'" - - await event_client.update_event_handler(handler) - ``` + ```python + # Update an existing event handler + handler = await event_client.get_event_handler("workflow_trigger") + handler.active = False # Disable the handler + handler.condition = "payload.status == 'COMPLETED' AND payload.priority == 'HIGH'" + + await event_client.update_event_handler(handler) + ``` """ - return await self.event_api.update_event_handler(event_handler) + return await self._event_api.update_event_handler(event_handler=event_handler, **kwargs) - async def delete_event_handler(self, name: str) -> None: + async def delete_event_handler(self, name: str, **kwargs) -> None: """Delete an event handler by name. Permanently removes an event handler from the system. - Parameters: - ----------- - name : str - The name of the event handler to delete + Args: + name: The name of the event handler to delete + **kwargs: Additional optional parameters to pass to the API + + Returns: + None Example: - -------- - ```python - # Delete an event handler - await event_client.delete_event_handler("old_workflow_trigger") - print("Event handler deleted successfully") - ``` + ```python + # Delete an event handler + await event_client.delete_event_handler("old_workflow_trigger") + print("Event handler deleted successfully") + ``` """ - return await self.event_api.remove_event_handler_status(name=name) + return await self._event_api.remove_event_handler_status(name=name, **kwargs) # Event Handler Tag Operations - async def get_event_handler_tags(self, name: str) -> List[TagAdapter]: + async def get_event_handler_tags(self, name: str, **kwargs) -> List[TagAdapter]: """Get tags for an event handler. Retrieves all tags associated with a specific event handler. Tags are used for organizing and categorizing event handlers. - Parameters: - ----------- - name : str - The name of the event handler + Args: + name: The name of the event handler + **kwargs: Additional optional parameters to pass to the API Returns: - -------- - List[TagAdapter] - List of tags associated with the event handler + List of TagAdapter instances associated with the event handler Example: - -------- - ```python - # Get tags for an event handler - tags = await event_client.get_event_handler_tags("workflow_trigger") - for tag in tags: - print(f"Tag: {tag.key} = {tag.value}") - ``` + ```python + # Get tags for an event handler + tags = await event_client.get_event_handler_tags("workflow_trigger") + for tag in tags: + print(f"Tag: {tag.key} = {tag.value}") + ``` """ - return await self.event_api.get_tags_for_event_handler(name=name) + return await self._event_api.get_tags_for_event_handler(name=name, **kwargs) - async def add_event_handler_tag(self, name: str, tags: List[TagAdapter]) -> None: + async def add_event_handler_tag(self, name: str, tags: List[TagAdapter], **kwargs) -> None: """Add tags to an event handler. Associates one or more tags with an event handler for organization and categorization. 
- Parameters: - ----------- - name : str - The name of the event handler - tags : List[TagAdapter] - List of tags to add to the event handler + Args: + name: The name of the event handler + tags: List of tags to add to the event handler + **kwargs: Additional optional parameters to pass to the API + + Returns: + None Example: - -------- - ```python - from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter - - # Add tags to an event handler - tags = [ - TagAdapter(key="environment", value="production"), - TagAdapter(key="team", value="platform"), - TagAdapter(key="priority", value="high") - ] - - await event_client.add_event_handler_tag("workflow_trigger", tags) - ``` + ```python + from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter + + # Add tags to an event handler + tags = [ + TagAdapter(key="environment", value="production"), + TagAdapter(key="team", value="platform"), + TagAdapter(key="priority", value="high") + ] + + await event_client.add_event_handler_tag("workflow_trigger", tags) + ``` """ # Note: Async API uses (name=name, tag=tags) keyword args to match the server signature. # Sync API uses (tags, name) positional args due to swagger-codegen parameter ordering. - return await self.event_api.put_tag_for_event_handler(name=name, tag=tags) + return await self._event_api.put_tag_for_event_handler(name=name, tag=tags, **kwargs) - async def remove_event_handler_tag(self, name: str, tags: List[TagAdapter]) -> None: + async def remove_event_handler_tag(self, name: str, tags: List[TagAdapter], **kwargs) -> None: """Remove tags from an event handler. Removes one or more tags from an event handler. - Parameters: - ----------- - name : str - The name of the event handler - tags : List[TagAdapter] - List of tags to remove from the event handler + Args: + name: The name of the event handler + tags: List of tags to remove from the event handler + **kwargs: Additional optional parameters to pass to the API + + Returns: + None Example: - -------- - ```python - from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter - - # Remove specific tags from an event handler - tags_to_remove = [ - TagAdapter(key="environment", value="production"), - TagAdapter(key="priority", value="high") - ] - - await event_client.remove_event_handler_tag("workflow_trigger", tags_to_remove) - ``` + ```python + from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter + + # Remove specific tags from an event handler + tags_to_remove = [ + TagAdapter(key="environment", value="production"), + TagAdapter(key="priority", value="high") + ] + + await event_client.remove_event_handler_tag("workflow_trigger", tags_to_remove) + ``` """ # Note: Async API uses (name=name, tag=tags) keyword args to match the server signature. # Sync API uses (tags, name) positional args due to swagger-codegen parameter ordering. - return await self.event_api.delete_tag_for_event_handler(name=name, tag=tags) + return await self._event_api.delete_tag_for_event_handler(name=name, tag=tags, **kwargs) # Queue Configuration Operations - async def get_queue_configuration(self, queue_type: str, queue_name: str) -> dict: + async def get_queue_configuration( + self, queue_type: str, queue_name: str, **kwargs + ) -> Dict[str, object]: """Get queue configuration. Retrieves the configuration for a specific event queue. 
- Parameters: - ----------- - queue_type : str - The type of queue (e.g., "kafka", "sqs", "rabbitmq") - queue_name : str - The name of the queue + Args: + queue_type: The type of queue (e.g., "kafka", "sqs", "rabbitmq") + queue_name: The name of the queue + **kwargs: Additional optional parameters to pass to the API Returns: - -------- - dict - Queue configuration settings + Dictionary containing queue configuration settings Example: - -------- - ```python - # Get Kafka queue configuration - config = await event_client.get_queue_configuration("kafka", "workflow_events") - print(f"Bootstrap servers: {config.get('bootstrapServers')}") - print(f"Topic: {config.get('topic')}") - ``` + ```python + # Get Kafka queue configuration + config = await event_client.get_queue_configuration("kafka", "workflow_events") + print(f"Bootstrap servers: {config.get('bootstrapServers')}") + print(f"Topic: {config.get('topic')}") + ``` """ - return await self.event_api.get_queue_config(queue_type=queue_type, queue_name=queue_name) + return await self._event_api.get_queue_config( + queue_type=queue_type, queue_name=queue_name, **kwargs + ) - async def delete_queue_configuration(self, queue_type: str, queue_name: str) -> None: + async def delete_queue_configuration(self, queue_type: str, queue_name: str, **kwargs) -> None: """Delete queue configuration. Removes the configuration for an event queue. - Parameters: - ----------- - queue_type : str - The type of queue (e.g., "kafka", "sqs", "rabbitmq") - queue_name : str - The name of the queue + Args: + queue_type: The type of queue (e.g., "kafka", "sqs", "rabbitmq") + queue_name: The name of the queue + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + # Delete a queue configuration + await event_client.delete_queue_configuration("kafka", "old_workflow_events") + print("Queue configuration deleted") + ``` + """ + return await self._event_api.delete_queue_config( + queue_type=queue_type, queue_name=queue_name, **kwargs + ) + + async def get_queue_names(self, **kwargs) -> Dict[str, str]: + """Get all queue names. + + Retrieves all queue names configured in the system. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + Dictionary mapping queue names to their types + + Example: + ```python + # Get all configured queue names + queues = await event_client.get_queue_names() + for queue_name, queue_type in queues.items(): + print(f"Queue: {queue_name}, Type: {queue_type}") + ``` + """ + return await self._event_api.get_queue_names(**kwargs) + + async def handle_incoming_event( + self, request_body: Dict[str, Dict[str, Any]], **kwargs + ) -> None: + """Handle an incoming event. + + Processes an incoming event from an external system. This method is typically + used for webhook integrations or custom event sources. 
+ + Args: + request_body: The incoming event request body containing event data + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + # Handle an incoming webhook event + event_data = { + "event_type": "workflow.completed", + "payload": { + "workflowId": "abc123", + "status": "COMPLETED", + "output": {"result": "success"} + } + } + await event_client.handle_incoming_event(event_data) + ``` + """ + return await self._event_api.handle_incoming_event(request_body=request_body, **kwargs) + + async def put_queue_configuration( + self, queue_type: str, queue_name: str, body: str, **kwargs + ) -> None: + """Create or update queue configuration. + + Creates or updates the configuration for an event queue. This configures + how Conductor connects to external message queues. + + Args: + queue_type: The type of queue (e.g., "kafka", "sqs", "rabbitmq", "amqp") + queue_name: The name of the queue + body: The queue configuration as a JSON string + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + import json + + # Configure a Kafka queue + kafka_config = { + "bootstrapServers": "localhost:9092", + "topic": "workflow_events", + "groupId": "conductor-consumer", + "consumerConfig": { + "auto.offset.reset": "earliest" + } + } + await event_client.put_queue_configuration( + "kafka", + "workflow_events", + json.dumps(kafka_config) + ) + ``` + """ + return await self._event_api.put_queue_config( + queue_type=queue_type, queue_name=queue_name, body=body, **kwargs + ) + + async def test(self, **kwargs) -> EventHandlerAdapter: + """Test an event handler. + + Tests an event handler configuration without actually creating or executing it. + Useful for validating handler configurations before deployment. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + EventHandlerAdapter instance with test results + + Example: + ```python + # Test an event handler configuration + result = await event_client.test() + print(f"Test completed: {result}") + ``` + """ + return await self._event_api.test(**kwargs) + + async def test_connectivity( + self, connectivity_test_input: ConnectivityTestInputAdapter, **kwargs + ) -> ConnectivityTestResultAdapter: + """Test connectivity to an external event system. + + Tests the connection to an external event system (like Kafka, SQS, etc.) + to verify that the configuration is correct and the system is reachable. 
+ + Args: + connectivity_test_input: Configuration details for the connectivity test + **kwargs: Additional optional parameters to pass to the API + + Returns: + ConnectivityTestResultAdapter containing the test results Example: - -------- - ```python - # Delete a queue configuration - await event_client.delete_queue_configuration("kafka", "old_workflow_events") - print("Queue configuration deleted") - ``` + ```python + from conductor.asyncio_client.adapters.models.connectivity_test_input_adapter import ConnectivityTestInputAdapter + + # Test Kafka connectivity + test_input = ConnectivityTestInputAdapter( + queue_type="kafka", + queue_name="workflow_events", + configuration={ + "bootstrapServers": "localhost:9092", + "topic": "test_topic" + } + ) + + result = await event_client.test_connectivity(test_input) + if result.success: + print("Connectivity test passed!") + else: + print(f"Connectivity test failed: {result.error}") + ``` """ - return await self.event_api.delete_queue_config( - queue_type=queue_type, queue_name=queue_name + return await self._event_api.test_connectivity( + connectivity_test_input=connectivity_test_input, **kwargs ) diff --git a/src/conductor/asyncio_client/orkes/orkes_integration_client.py b/src/conductor/asyncio_client/orkes/orkes_integration_client.py index e96b31a0f..7459662f7 100644 --- a/src/conductor/asyncio_client/orkes/orkes_integration_client.py +++ b/src/conductor/asyncio_client/orkes/orkes_integration_client.py @@ -22,179 +22,848 @@ class OrkesIntegrationClient(OrkesBaseClient): def __init__(self, configuration: Configuration, api_client: ApiClient): + """Initialize the OrkesIntegrationClient with configuration and API client. + + Args: + configuration: Configuration object containing server settings and authentication + api_client: ApiClient instance for making API requests + + Example: + ```python + from conductor.asyncio_client.configuration.configuration import Configuration + from conductor.asyncio_client.adapters import ApiClient + + config = Configuration(server_api_url="http://localhost:8080/api") + api_client = ApiClient(configuration=config) + integration_client = OrkesIntegrationClient(config, api_client) + ``` + """ super().__init__(configuration, api_client) # Integration Provider Operations async def save_integration_provider( - self, name: str, integration_update: IntegrationUpdateAdapter + self, name: str, integration_update: IntegrationUpdateAdapter, **kwargs ) -> None: - """Create or update an integration provider""" - await self.integration_api.save_integration_provider(name, integration_update) + """Create or update an integration provider. + + Integration providers are external services like AI models, databases, or APIs + that can be integrated with Conductor workflows. 
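Taken together, the event client methods above support a configure, verify, then register flow. A rough sketch, assuming `OrkesEventClient` shares the `OrkesBaseClient` constructor; the Kafka settings, event name, and action payload are placeholders modeled on the docstring examples:

```python
import asyncio
import json

from conductor.asyncio_client.adapters import ApiClient
from conductor.asyncio_client.adapters.models.action_adapter import ActionAdapter
from conductor.asyncio_client.adapters.models.connectivity_test_input_adapter import (
    ConnectivityTestInputAdapter,
)
from conductor.asyncio_client.adapters.models.event_handler_adapter import EventHandlerAdapter
from conductor.asyncio_client.configuration.configuration import Configuration
from conductor.asyncio_client.orkes.orkes_event_client import OrkesEventClient


async def main() -> None:
    config = Configuration(server_api_url="http://localhost:8080/api")  # placeholder server URL
    event_client = OrkesEventClient(config, ApiClient(configuration=config))

    # 1. Register the queue configuration the handlers will consume from.
    await event_client.put_queue_configuration(
        "kafka",
        "workflow_events",
        json.dumps({"bootstrapServers": "localhost:9092", "topic": "workflow_events"}),
    )

    # 2. Verify the broker is reachable before wiring up handlers.
    result = await event_client.test_connectivity(
        ConnectivityTestInputAdapter(
            queue_type="kafka",
            queue_name="workflow_events",
            configuration={"bootstrapServers": "localhost:9092"},
        )
    )
    print("connectivity ok:", result.success)

    # 3. Register a handler for the events; the action here is a placeholder.
    handler = EventHandlerAdapter(
        name="workflow_trigger",
        event="workflow_events",
        active=True,
        actions=[ActionAdapter(action="start_workflow")],
    )
    await event_client.create_event_handler([handler])


asyncio.run(main())
```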
+ + Args: + name: Unique name for the integration provider + integration_update: Integration configuration including credentials and settings + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.integration_update_adapter import IntegrationUpdateAdapter + + # Create an OpenAI integration provider + integration = IntegrationUpdateAdapter( + category="AI_MODEL", + type="openai", + enabled=True, + configuration={ + "apiKey": "sk-...", + "model": "gpt-4" + } + ) + await integration_client.save_integration_provider("my-openai", integration) + ``` + """ + await self._integration_api.save_integration_provider( + name=name, integration_update=integration_update, **kwargs + ) async def save_integration( - self, integration_name, integration_details: IntegrationUpdateAdapter + self, integration_name, integration_details: IntegrationUpdateAdapter, **kwargs ) -> None: - await self.integration_api.save_integration_provider(integration_name, integration_details) - - async def get_integration_provider(self, name: str) -> IntegrationAdapter: - """Get integration provider by name""" - return await self.integration_api.get_integration_provider(name) + """Create or update an integration (alias for save_integration_provider). + + Args: + integration_name: Unique name for the integration + integration_details: Integration configuration including credentials and settings + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.integration_update_adapter import IntegrationUpdateAdapter + + integration = IntegrationUpdateAdapter( + category="VECTOR_DB", + type="pinecone", + enabled=True, + configuration={"apiKey": "...", "environment": "us-east-1"} + ) + await integration_client.save_integration("my-pinecone", integration) + ``` + """ + await self._integration_api.save_integration_provider( + name=integration_name, integration_update=integration_details, **kwargs + ) - async def get_integration(self, integration_name: str) -> Optional[IntegrationAdapter]: + async def get_integration_provider(self, name: str, **kwargs) -> IntegrationAdapter: + """Get integration provider by name. + + Args: + name: Name of the integration provider to retrieve + **kwargs: Additional optional parameters to pass to the API + + Returns: + IntegrationAdapter instance containing the integration provider details + + Example: + ```python + integration = await integration_client.get_integration_provider("my-openai") + print(f"Type: {integration.type}, Enabled: {integration.enabled}") + ``` + """ + return await self._integration_api.get_integration_provider(name=name, **kwargs) + + async def get_integration( + self, integration_name: str, **kwargs + ) -> Optional[IntegrationAdapter]: + """Get integration by name, returning None if not found. + + This is a safe version of get_integration_provider that returns None + instead of raising an exception when the integration is not found. 
+ + Args: + integration_name: Name of the integration to retrieve + **kwargs: Additional optional parameters to pass to the API + + Returns: + IntegrationAdapter instance if found, None otherwise + + Example: + ```python + integration = await integration_client.get_integration("my-openai") + if integration: + print(f"Found integration: {integration.type}") + else: + print("Integration not found") + ``` + """ try: - return await self.get_integration_provider(integration_name) + return await self.get_integration_provider(name=integration_name, **kwargs) except NotFoundException: return None - async def delete_integration_provider(self, name: str) -> None: - """Delete an integration provider""" - await self.integration_api.delete_integration_provider(name) + async def delete_integration_provider(self, name: str, **kwargs) -> None: + """Delete an integration provider. + + Args: + name: Name of the integration provider to delete + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + await integration_client.delete_integration_provider("old-integration") + ``` + """ + await self._integration_api.delete_integration_provider(name=name, **kwargs) async def get_integration_providers( - self, category: Optional[str] = None, active_only: Optional[bool] = None + self, category: Optional[str] = None, active_only: Optional[bool] = None, **kwargs ) -> List[IntegrationAdapter]: - """Get all integration providers""" - return await self.integration_api.get_integration_providers( - category=category, active_only=active_only + """Get all integration providers with optional filtering. + + Args: + category: Optional category to filter by (e.g., "AI_MODEL", "VECTOR_DB") + active_only: If True, only return active integrations. If None, return all + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of IntegrationAdapter instances + + Example: + ```python + # Get all active AI model integrations + ai_integrations = await integration_client.get_integration_providers( + category="AI_MODEL", + active_only=True + ) + for integration in ai_integrations: + print(f"AI Model: {integration.name}, Type: {integration.type}") + ``` + """ + return await self._integration_api.get_integration_providers( + category=category, active_only=active_only, **kwargs ) - async def get_integration_provider_defs(self) -> List[IntegrationDefAdapter]: - """Get integration provider definitions""" - return await self.integration_api.get_integration_provider_defs() + async def get_integration_provider_defs(self, **kwargs) -> List[IntegrationDefAdapter]: + """Get integration provider definitions. + + Retrieves the definitions/schemas for all available integration types. + These definitions specify what configuration parameters are required for each type. 
+ + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of IntegrationDefAdapter instances containing integration type definitions + + Example: + ```python + # Get all available integration definitions + defs = await integration_client.get_integration_provider_defs() + for definition in defs: + print(f"Type: {definition.type}, Category: {definition.category}") + print(f"Required fields: {definition.required_fields}") + ``` + """ + return await self._integration_api.get_integration_provider_defs(**kwargs) # Integration API Operations async def save_integration_api( - self, name: str, integration_name: str, integration_api_update: IntegrationApiUpdateAdapter + self, + name: str, + integration_name: str, + integration_api_update: IntegrationApiUpdateAdapter, + **kwargs, ) -> None: - """Create or update an integration API""" - await self.integration_api.save_integration_api( - name, integration_name, integration_api_update + """Create or update an integration API configuration. + + Integration APIs define specific API endpoints or functions that can be + used with an integration provider (e.g., different OpenAI models or endpoints). + + Args: + name: Name of the API configuration + integration_name: Name of the parent integration provider + integration_api_update: API configuration details + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.integration_api_update_adapter import IntegrationApiUpdateAdapter + + # Configure GPT-4 API for OpenAI integration + api_config = IntegrationApiUpdateAdapter( + api="chat_completion", + enabled=True, + configuration={ + "model": "gpt-4", + "temperature": 0.7 + } + ) + await integration_client.save_integration_api( + "gpt-4-chat", + "my-openai", + api_config + ) + ``` + """ + await self._integration_api.save_integration_api( + name=name, + integration_name=integration_name, + integration_api_update=integration_api_update, + **kwargs, ) - async def get_integration_api(self, name: str, integration_name: str) -> IntegrationApiAdapter: - """Get integration API by name and integration name""" - return await self.integration_api.get_integration_api(name, integration_name) + async def get_integration_api( + self, name: str, integration_name: str, **kwargs + ) -> IntegrationApiAdapter: + """Get integration API configuration by name. + + Args: + name: Name of the API configuration + integration_name: Name of the parent integration provider + **kwargs: Additional optional parameters to pass to the API + + Returns: + IntegrationApiAdapter instance containing the API configuration + + Example: + ```python + api = await integration_client.get_integration_api("gpt-4-chat", "my-openai") + print(f"API: {api.api}, Enabled: {api.enabled}") + ``` + """ + return await self._integration_api.get_integration_api( + name=name, integration_name=integration_name, **kwargs + ) - async def delete_integration_api(self, name: str, integration_name: str) -> None: - """Delete an integration API""" - await self.integration_api.delete_integration_api(name, integration_name) + async def delete_integration_api(self, name: str, integration_name: str, **kwargs) -> None: + """Delete an integration API configuration. 
- async def get_integration_apis(self, integration_name: str) -> List[IntegrationApiAdapter]: - """Get all APIs for a specific integration""" - return await self.integration_api.get_integration_apis(integration_name) + Args: + name: Name of the API configuration to delete + integration_name: Name of the parent integration provider + **kwargs: Additional optional parameters to pass to the API - async def get_integration_available_apis(self, name: str) -> List[str]: - """Get available APIs for an integration""" - return await self.integration_api.get_integration_available_apis(name) + Returns: + None + + Example: + ```python + await integration_client.delete_integration_api("gpt-4-chat", "my-openai") + ``` + """ + await self._integration_api.delete_integration_api( + name=name, integration_name=integration_name, **kwargs + ) + + async def get_integration_apis( + self, integration_name: str, **kwargs + ) -> List[IntegrationApiAdapter]: + """Get all API configurations for a specific integration. + + Args: + integration_name: Name of the integration provider + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of IntegrationApiAdapter instances + + Example: + ```python + apis = await integration_client.get_integration_apis("my-openai") + for api in apis: + print(f"API: {api.name}, Type: {api.api}") + ``` + """ + return await self._integration_api.get_integration_apis(name=integration_name, **kwargs) + + async def get_integration_available_apis(self, name: str, **kwargs) -> List[str]: + """Get available API types for an integration. + + Returns the list of API types that can be configured for a given + integration type (e.g., for OpenAI: chat_completion, text_completion, etc.). + + Args: + name: Name of the integration provider + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of available API type strings + + Example: + ```python + available_apis = await integration_client.get_integration_available_apis("my-openai") + print(f"Available APIs: {available_apis}") + # Output: ['chat_completion', 'text_completion', 'embeddings'] + ``` + """ + return await self._integration_api.get_integration_available_apis(name=name, **kwargs) # Integration Operations - async def save_all_integrations(self, request_body: List[IntegrationAdapter]) -> None: - """Save all integrations""" - await self.integration_api.save_all_integrations(request_body) + async def save_all_integrations(self, request_body: List[IntegrationAdapter], **kwargs) -> None: + """Bulk save multiple integrations at once. + + Args: + request_body: List of IntegrationAdapter instances to save + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + integrations = [ + IntegrationAdapter(name="openai-1", type="openai", enabled=True), + IntegrationAdapter(name="pinecone-1", type="pinecone", enabled=True) + ] + await integration_client.save_all_integrations(integrations) + ``` + """ + await self._integration_api.save_all_integrations(integration=request_body, **kwargs) async def get_all_integrations( - self, category: Optional[str] = None, active_only: Optional[bool] = None + self, category: Optional[str] = None, active_only: Optional[bool] = None, **kwargs ) -> List[IntegrationAdapter]: - """Get all integrations with optional filtering""" - return await self.integration_api.get_all_integrations( - category=category, active_only=active_only + """Get all integrations with optional filtering. 
+ + Args: + category: Optional category to filter by (e.g., "AI_MODEL", "VECTOR_DB") + active_only: If True, only return active integrations + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of IntegrationAdapter instances + + Example: + ```python + # Get all active vector database integrations + vector_dbs = await integration_client.get_all_integrations( + category="VECTOR_DB", + active_only=True + ) + ``` + """ + return await self._integration_api.get_all_integrations( + category=category, active_only=active_only, **kwargs ) async def get_providers_and_integrations( - self, integration_type: Optional[str] = None, active_only: Optional[bool] = None + self, integration_type: Optional[str] = None, active_only: Optional[bool] = None, **kwargs ) -> List[str]: - """Get providers and integrations together""" - return await self.integration_api.get_providers_and_integrations( - type=integration_type, active_only=active_only + """Get providers and integrations combined. + + Returns a list of all integration and provider names, optionally + filtered by type and active status. + + Args: + integration_type: Optional integration type to filter by + active_only: If True, only return active items + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of integration/provider name strings + + Example: + ```python + # Get all active AI model providers and integrations + all_ai = await integration_client.get_providers_and_integrations( + integration_type="AI_MODEL", + active_only=True + ) + ``` + """ + return await self._integration_api.get_providers_and_integrations( + type=integration_type, active_only=active_only, **kwargs ) # Tag Management Operations async def put_tag_for_integration( - self, tags: List[TagAdapter], name: str, integration_name: str + self, tags: List[TagAdapter], name: str, integration_name: str, **kwargs ) -> None: - """Add tags to an integration""" - await self.integration_api.put_tag_for_integration( - name=name, integration_name=integration_name, tag=tags + """Add tags to an integration API. + + Args: + tags: List of tags to add + name: Name of the API configuration + integration_name: Name of the integration provider + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter + + tags = [ + TagAdapter(key="environment", value="production"), + TagAdapter(key="model", value="gpt-4") + ] + await integration_client.put_tag_for_integration(tags, "gpt-4-chat", "my-openai") + ``` + """ + await self._integration_api.put_tag_for_integration( + name=name, integration_name=integration_name, tag=tags, **kwargs ) - async def get_tags_for_integration(self, name: str, integration_name: str) -> List[TagAdapter]: - """Get tags for an integration""" - return await self.integration_api.get_tags_for_integration( - name=name, integration_name=integration_name + async def get_tags_for_integration( + self, name: str, integration_name: str, **kwargs + ) -> List[TagAdapter]: + """Get tags for an integration API. 
+ + Args: + name: Name of the API configuration + integration_name: Name of the integration provider + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of TagAdapter instances + + Example: + ```python + tags = await integration_client.get_tags_for_integration("gpt-4-chat", "my-openai") + for tag in tags: + print(f"{tag.key}: {tag.value}") + ``` + """ + return await self._integration_api.get_tags_for_integration( + name=name, integration_name=integration_name, **kwargs ) async def delete_tag_for_integration( - self, tags: List[TagAdapter], name: str, integration_name: str + self, tags: List[TagAdapter], name: str, integration_name: str, **kwargs ) -> None: - """Delete tags from an integration""" - await self.integration_api.delete_tag_for_integration( - name=name, integration_name=integration_name, tag=tags + """Delete tags from an integration API. + + Args: + tags: List of tags to delete + name: Name of the API configuration + integration_name: Name of the integration provider + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter + + tags = [TagAdapter(key="environment", value="production")] + await integration_client.delete_tag_for_integration(tags, "gpt-4-chat", "my-openai") + ``` + """ + await self._integration_api.delete_tag_for_integration( + name=name, integration_name=integration_name, tag=tags, **kwargs ) - async def put_tag_for_integration_provider(self, body: List[TagAdapter], name: str) -> None: - """Add tags to an integration provider""" - await self.integration_api.put_tag_for_integration_provider(name, body) + async def put_tag_for_integration_provider( + self, body: List[TagAdapter], name: str, **kwargs + ) -> None: + """Add tags to an integration provider. + + Args: + body: List of tags to add + name: Name of the integration provider + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter + + tags = [ + TagAdapter(key="team", value="ai-platform"), + TagAdapter(key="cost-center", value="engineering") + ] + await integration_client.put_tag_for_integration_provider(tags, "my-openai") + ``` + """ + await self._integration_api.put_tag_for_integration_provider(name=name, tag=body, **kwargs) + + async def get_tags_for_integration_provider(self, name: str, **kwargs) -> List[TagAdapter]: + """Get tags for an integration provider. + + Args: + name: Name of the integration provider + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of TagAdapter instances + + Example: + ```python + tags = await integration_client.get_tags_for_integration_provider("my-openai") + for tag in tags: + print(f"{tag.key}: {tag.value}") + ``` + """ + return await self._integration_api.get_tags_for_integration_provider(name=name, **kwargs) + + async def delete_tag_for_integration_provider( + self, body: List[TagAdapter], name: str, **kwargs + ) -> None: + """Delete tags from an integration provider. 
+ + Args: + body: List of tags to delete + name: Name of the integration provider + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter + + tags = [TagAdapter(key="team", value="ai-platform")] + await integration_client.delete_tag_for_integration_provider(tags, "my-openai") + ``` + """ + await self._integration_api.delete_tag_for_integration_provider( + name=name, tag=body, **kwargs + ) - async def get_tags_for_integration_provider(self, name: str) -> List[TagAdapter]: - """Get tags for an integration provider""" - return await self.integration_api.get_tags_for_integration_provider(name) + # Token Usage Operations + async def get_token_usage_for_integration( + self, name: str, integration_name: str, **kwargs + ) -> int: + """Get token usage for a specific integration API. + + Returns the total number of tokens consumed by a specific API configuration. + + Args: + name: Name of the API configuration + integration_name: Name of the integration provider + **kwargs: Additional optional parameters to pass to the API + + Returns: + Total token count as an integer + + Example: + ```python + usage = await integration_client.get_token_usage_for_integration( + "gpt-4-chat", + "my-openai" + ) + print(f"Total tokens used: {usage}") + ``` + """ + return await self._integration_api.get_token_usage_for_integration( + name=name, integration_name=integration_name, **kwargs + ) - async def delete_tag_for_integration_provider(self, body: List[TagAdapter], name: str) -> None: - """Delete tags from an integration provider""" - await self.integration_api.delete_tag_for_integration_provider(name, body) + async def get_token_usage_for_integration_provider(self, name: str, **kwargs) -> Dict[str, str]: + """Get token usage for an integration provider. - # Token Usage Operations - async def get_token_usage_for_integration(self, name: str, integration_name: str) -> int: - """Get token usage for a specific integration""" - return await self.integration_api.get_token_usage_for_integration(name, integration_name) + Returns token usage statistics for all APIs under an integration provider. - async def get_token_usage_for_integration_provider(self, name: str) -> Dict[str, str]: - """Get token usage for an integration provider""" - return await self.integration_api.get_token_usage_for_integration_provider(name) + Args: + name: Name of the integration provider + **kwargs: Additional optional parameters to pass to the API - async def register_token_usage(self, name: str, integration_name: str, tokens: int) -> None: - """Register token usage for an integration""" - await self.integration_api.register_token_usage(name, integration_name, tokens) + Returns: + Dictionary mapping API names to their token usage + + Example: + ```python + usage = await integration_client.get_token_usage_for_integration_provider("my-openai") + for api_name, tokens in usage.items(): + print(f"{api_name}: {tokens} tokens") + ``` + """ + return await self._integration_api.get_token_usage_for_integration_provider( + name=name, **kwargs + ) + + async def register_token_usage( + self, name: str, integration_name: str, tokens: int, **kwargs + ) -> None: + """Register token usage for an integration. + + Records token consumption for billing and monitoring purposes. 
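+        The token count is forwarded as the body of the underlying
+        register_token_usage API call.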
+ + Args: + name: Name of the API configuration + integration_name: Name of the integration provider + tokens: Number of tokens consumed + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + # Register that 1500 tokens were consumed + await integration_client.register_token_usage( + "gpt-4-chat", + "my-openai", + 1500 + ) + ``` + """ + await self._integration_api.register_token_usage( + name=name, integration_name=integration_name, body=tokens, **kwargs + ) # Prompt Integration Operations async def associate_prompt_with_integration( - self, ai_prompt: str, integration_provider: str, integration_name: str + self, ai_prompt: str, integration_provider: str, integration_name: str, **kwargs ) -> None: - """Associate a prompt with an integration""" - await self.integration_api.associate_prompt_with_integration( - ai_prompt, integration_provider, integration_name + """Associate a prompt template with an integration. + + Links a prompt template to an integration so it can be used with AI models. + + Args: + ai_prompt: Name of the prompt template to associate + integration_provider: Name of the integration provider + integration_name: Name of the specific integration API + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + await integration_client.associate_prompt_with_integration( + "customer_service_prompt", + "my-openai", + "gpt-4-chat" + ) + ``` + """ + await self._integration_api.associate_prompt_with_integration( + integration_provider=integration_provider, + integration_name=integration_name, + prompt_name=ai_prompt, + **kwargs, ) async def get_prompts_with_integration( - self, integration_provider: str, integration_name: str + self, integration_provider: str, integration_name: str, **kwargs ) -> List[MessageTemplateAdapter]: - """Get prompts associated with an integration""" - return await self.integration_api.get_prompts_with_integration( - integration_provider, integration_name + """Get prompts associated with an integration. + + Retrieves all prompt templates that are linked to a specific integration. + + Args: + integration_provider: Name of the integration provider + integration_name: Name of the specific integration API + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of MessageTemplateAdapter instances + + Example: + ```python + prompts = await integration_client.get_prompts_with_integration( + "my-openai", + "gpt-4-chat" + ) + for prompt in prompts: + print(f"Prompt: {prompt.name}") + ``` + """ + return await self._integration_api.get_prompts_with_integration( + integration_provider=integration_provider, integration_name=integration_name, **kwargs ) # Event and Statistics Operations - async def record_event_stats(self, event_type: str, event_log: List[EventLogAdapter]) -> None: - """Record event statistics""" - await self.integration_api.record_event_stats(type=event_type, event_log=event_log) + async def record_event_stats( + self, event_type: str, event_log: List[EventLogAdapter], **kwargs + ) -> None: + """Record event statistics for integrations. + + Records integration usage events for monitoring and analytics. 
+ + Args: + event_type: Type of event being recorded + event_log: List of event log entries to record + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.event_log_adapter import EventLogAdapter + + events = [ + EventLogAdapter( + event="api_call", + timestamp=1234567890, + metadata={"tokens": 1500, "model": "gpt-4"} + ) + ] + await integration_client.record_event_stats("usage", events) + ``` + """ + await self._integration_api.record_event_stats( + type=event_type, event_log=event_log, **kwargs + ) # Utility Methods async def get_integration_by_category( self, category: str, active_only: bool = True ) -> List[IntegrationAdapter]: - """Get integrations filtered by category""" + """Get integrations filtered by category. + + Convenience method for retrieving integrations of a specific type. + + Args: + category: Category to filter by (e.g., "AI_MODEL", "VECTOR_DB") + active_only: If True, only return active integrations. Defaults to True + + Returns: + List of IntegrationAdapter instances matching the category + + Example: + ```python + # Get all active AI model integrations + ai_models = await integration_client.get_integration_by_category("AI_MODEL") + for model in ai_models: + print(f"Model: {model.name}") + ``` + """ return await self.get_all_integrations(category=category, active_only=active_only) async def get_active_integrations(self) -> List[IntegrationAdapter]: - """Get only active integrations""" + """Get only active integrations. + + Convenience method for retrieving all active integrations regardless of category. + + Returns: + List of all active IntegrationAdapter instances + + Example: + ```python + active = await integration_client.get_active_integrations() + print(f"Found {len(active)} active integrations") + ``` + """ return await self.get_all_integrations(active_only=True) async def get_integration_provider_by_category( - self, category: str, active_only: bool = True + self, category: str, active_only: bool = True, **kwargs ) -> List[IntegrationAdapter]: - """Get integration providers filtered by category""" - return await self.get_integration_providers(category=category, active_only=active_only) + """Get integration providers filtered by category. + + Convenience method for retrieving integration providers of a specific type. + + Args: + category: Category to filter by (e.g., "AI_MODEL", "VECTOR_DB") + active_only: If True, only return active providers. Defaults to True + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of IntegrationAdapter instances matching the category + + Example: + ```python + # Get all active vector database providers + vector_dbs = await integration_client.get_integration_provider_by_category("VECTOR_DB") + ``` + """ + return await self.get_integration_providers( + category=category, active_only=active_only, **kwargs + ) + + async def get_active_integration_providers(self, **kwargs) -> List[IntegrationAdapter]: + """Get only active integration providers. + + Convenience method for retrieving all active integration providers. 
+ + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of all active IntegrationAdapter provider instances + + Example: + ```python + providers = await integration_client.get_active_integration_providers() + for provider in providers: + print(f"Provider: {provider.name}, Type: {provider.type}") + ``` + """ + return await self.get_integration_providers(active_only=True, **kwargs) + + async def get_integrations(self, **kwargs) -> List[IntegrationAdapter]: + """Get all integrations. + + Convenience method that retrieves all integration providers. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of all IntegrationAdapter instances - async def get_active_integration_providers(self) -> List[IntegrationAdapter]: - """Get only active integration providers""" - return await self.get_integration_providers(active_only=True) + Example: + ```python + all_integrations = await integration_client.get_integrations() + print(f"Total integrations: {len(all_integrations)}") + ``` + """ + return await self._integration_api.get_integration_providers(**kwargs) diff --git a/src/conductor/asyncio_client/orkes/orkes_metadata_client.py b/src/conductor/asyncio_client/orkes/orkes_metadata_client.py index 2bce4de0c..f876f8632 100644 --- a/src/conductor/asyncio_client/orkes/orkes_metadata_client.py +++ b/src/conductor/asyncio_client/orkes/orkes_metadata_client.py @@ -1,6 +1,9 @@ from __future__ import annotations -from typing import List, Optional +from typing import Any, Dict, List, Optional, cast + +from deprecated import deprecated +from typing_extensions import deprecated as typing_deprecated from conductor.asyncio_client.adapters import ApiClient from conductor.asyncio_client.adapters.models.extended_task_def_adapter import ( @@ -18,24 +21,173 @@ class OrkesMetadataClient(OrkesBaseClient): def __init__(self, configuration: Configuration, api_client: ApiClient): + """Initialize the OrkesMetadataClient with configuration and API client. + + Args: + configuration: Configuration object containing server settings and authentication + api_client: ApiClient instance for making API requests + + Example: + ```python + from conductor.asyncio_client.configuration.configuration import Configuration + from conductor.asyncio_client.adapters import ApiClient + + config = Configuration(server_api_url="http://localhost:8080/api") + api_client = ApiClient(configuration=config) + metadata_client = OrkesMetadataClient(config, api_client) + ``` + """ super().__init__(configuration, api_client) # Task Definition Operations + @deprecated("register_task_def is deprecated; use register_task_def_validated instead") + @typing_deprecated("register_task_def is deprecated; use register_task_def_validated instead") async def register_task_def(self, task_def: ExtendedTaskDefAdapter) -> None: - """Register a new task definition""" - await self.metadata_api.register_task_def([task_def]) + """Register a new task definition. + + .. deprecated:: + Use register_task_def_validated instead for type-safe validated responses. 
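+
+        The task definition is wrapped in a single-element list and passed to the
+        list-based register_task_def API.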
+ + Args: + task_def: Task definition to register + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.extended_task_def_adapter import ExtendedTaskDefAdapter + + task_def = ExtendedTaskDefAdapter( + name="my_task", + description="My custom task", + timeout_seconds=60, + retry_count=3 + ) + await metadata_client.register_task_def(task_def) + ``` + """ + await self._metadata_api.register_task_def([task_def]) + + async def register_task_def_validated( + self, extended_task_def: List[ExtendedTaskDefAdapter], **kwargs + ) -> None: + """Register one or more task definitions. + + Args: + extended_task_def: List of task definitions to register + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.extended_task_def_adapter import ExtendedTaskDefAdapter + + task_defs = [ + ExtendedTaskDefAdapter( + name="task1", + description="First task", + timeout_seconds=60 + ), + ExtendedTaskDefAdapter( + name="task2", + description="Second task", + timeout_seconds=120 + ) + ] + await metadata_client.register_task_def_validated(task_defs) + ``` + """ + await self._metadata_api.register_task_def(extended_task_def=extended_task_def, **kwargs) + + async def update_task_def(self, task_def: ExtendedTaskDefAdapter, **kwargs) -> None: + """Update an existing task definition. + + Args: + task_def: Updated task definition + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.extended_task_def_adapter import ExtendedTaskDefAdapter + + task_def = ExtendedTaskDefAdapter( + name="my_task", + description="Updated description", + timeout_seconds=90, + retry_count=5 + ) + await metadata_client.update_task_def(task_def) + ``` + """ + await self._metadata_api.update_task_def(extended_task_def=task_def, **kwargs) + + async def unregister_task_def(self, task_type: str, **kwargs) -> None: + """Unregister (delete) a task definition. + + Args: + task_type: Name of the task definition to unregister + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + await metadata_client.unregister_task_def("my_task") + ``` + """ + await self._metadata_api.unregister_task_def(tasktype=task_type, **kwargs) + + @deprecated("get_task_def is deprecated; use get_task_def_validated instead") + @typing_deprecated("get_task_def is deprecated; use get_task_def_validated instead") + async def get_task_def(self, task_type: str) -> object: + """Get a task definition by task type. + + .. deprecated:: + Use get_task_def_validated instead for type-safe validated responses. 
- async def update_task_def(self, task_def: ExtendedTaskDefAdapter) -> None: - """Update an existing task definition""" - await self.metadata_api.update_task_def(task_def) + Args: + task_type: Name of the task definition - async def unregister_task_def(self, task_type: str) -> None: - """Unregister a task definition""" - await self.metadata_api.unregister_task_def(task_type) + Returns: + Raw response object from the API - async def get_task_def(self, task_type: str) -> TaskDefAdapter: - """Get a task definition by task type""" - return await self.metadata_api.get_task_def(task_type) + Example: + ```python + task_def = await metadata_client.get_task_def("my_task") + ``` + """ + return await self._metadata_api.get_task_def(task_type) + + async def get_task_def_validated(self, task_type: str, **kwargs) -> Optional[TaskDefAdapter]: + """Get a task definition by task type. + + Args: + task_type: Name of the task definition + **kwargs: Additional optional parameters to pass to the API + + Returns: + TaskDefAdapter instance containing the task definition, or None if not found + + Example: + ```python + task_def = await metadata_client.get_task_def_validated("my_task") + if task_def: + print(f"Task: {task_def.name}, Timeout: {task_def.timeout_seconds}s") + ``` + """ + result = await self._metadata_api.get_task_def(tasktype=task_type, **kwargs) + + result_dict = cast(Dict[str, Any], result) + result_model = TaskDefAdapter.from_dict(result_dict) + + return result_model async def get_task_defs( self, @@ -43,40 +195,213 @@ async def get_task_defs( metadata: Optional[bool] = None, tag_key: Optional[str] = None, tag_value: Optional[str] = None, + **kwargs, ) -> List[TaskDefAdapter]: - """Get all task definitions with optional filtering""" - return await self.metadata_api.get_task_defs( - access=access, metadata=metadata, tag_key=tag_key, tag_value=tag_value + """Get all task definitions with optional filtering. + + Args: + access: Filter by access level (e.g., "READ", "EXECUTE") + metadata: If True, include metadata in the response + tag_key: Filter by tag key + tag_value: Filter by tag value (requires tag_key) + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of TaskDefAdapter instances + + Example: + ```python + # Get all task definitions + all_tasks = await metadata_client.get_task_defs() + + # Get tasks with metadata + tasks_with_metadata = await metadata_client.get_task_defs(metadata=True) + + # Get tasks by tag + tagged_tasks = await metadata_client.get_task_defs( + tag_key="environment", + tag_value="production" + ) + ``` + """ + return await self._metadata_api.get_task_defs( + access=access, metadata=metadata, tag_key=tag_key, tag_value=tag_value, **kwargs ) # Workflow Definition Operations + @deprecated("create_workflow_def is deprecated; use create_workflow_def_validated instead") + @typing_deprecated( + "create_workflow_def is deprecated; use create_workflow_def_validated instead" + ) async def create_workflow_def( self, extended_workflow_def: ExtendedWorkflowDefAdapter, overwrite: Optional[bool] = None, new_version: Optional[bool] = None, ) -> object: - """Create a new workflow definition""" - return await self.metadata_api.create( + """Create a new workflow definition. + + .. deprecated:: + Use create_workflow_def_validated instead for type-safe validated responses. 
+ + Args: + extended_workflow_def: Workflow definition to create + overwrite: If True, overwrite existing definition with same name + new_version: If True, create a new version instead of overwriting + + Returns: + Raw response object from the API + + Example: + ```python + from conductor.asyncio_client.adapters.models.extended_workflow_def_adapter import ExtendedWorkflowDefAdapter + + workflow_def = ExtendedWorkflowDefAdapter( + name="my_workflow", + description="My workflow", + version=1, + tasks=[] + ) + await metadata_client.create_workflow_def(workflow_def) + ``` + """ + return await self._metadata_api.create( extended_workflow_def, overwrite=overwrite, new_version=new_version ) + async def create_workflow_def_validated( + self, + extended_workflow_def: ExtendedWorkflowDefAdapter, + overwrite: Optional[bool] = None, + new_version: Optional[bool] = None, + **kwargs, + ) -> None: + """Create a new workflow definition. + + Args: + extended_workflow_def: Workflow definition to create + overwrite: If True, overwrite existing definition with same name + new_version: If True, create a new version instead of overwriting + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.extended_workflow_def_adapter import ExtendedWorkflowDefAdapter + from conductor.asyncio_client.adapters.models.workflow_task_adapter import WorkflowTaskAdapter + + workflow_def = ExtendedWorkflowDefAdapter( + name="my_workflow", + description="Order processing workflow", + version=1, + tasks=[ + WorkflowTaskAdapter( + name="validate_order", + task_reference_name="validate_ref", + type="SIMPLE" + ) + ] + ) + await metadata_client.create_workflow_def_validated(workflow_def) + ``` + """ + await self._metadata_api.create( + extended_workflow_def, overwrite=overwrite, new_version=new_version, **kwargs + ) + + @deprecated("update_workflow_defs is deprecated; use update_workflow_defs_validated instead") + @typing_deprecated( + "update_workflow_defs is deprecated; use update_workflow_defs_validated instead" + ) async def update_workflow_defs( self, extended_workflow_defs: List[ExtendedWorkflowDefAdapter], overwrite: Optional[bool] = None, new_version: Optional[bool] = None, ) -> object: - """Create or update multiple workflow definitions""" - return await self.metadata_api.update( + """Create or update multiple workflow definitions. + + .. deprecated:: + Use update_workflow_defs_validated instead for type-safe validated responses. + + Args: + extended_workflow_defs: List of workflow definitions to create/update + overwrite: If True, overwrite existing definitions + new_version: If True, create new versions instead of overwriting + + Returns: + Raw response object from the API + + Example: + ```python + workflows = [workflow_def1, workflow_def2] + await metadata_client.update_workflow_defs(workflows, overwrite=True) + ``` + """ + return await self._metadata_api.update( extended_workflow_defs, overwrite=overwrite, new_version=new_version ) + async def update_workflow_defs_validated( + self, + extended_workflow_defs: List[ExtendedWorkflowDefAdapter], + overwrite: Optional[bool] = None, + new_version: Optional[bool] = None, + **kwargs, + ) -> None: + """Create or update multiple workflow definitions. 
+ + Args: + extended_workflow_defs: List of workflow definitions to create/update + overwrite: If True, overwrite existing definitions + new_version: If True, create new versions instead of overwriting + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + workflows = [workflow_def1, workflow_def2] + await metadata_client.update_workflow_defs_validated(workflows, overwrite=True) + ``` + """ + await self._metadata_api.update( + extended_workflow_def=extended_workflow_defs, + overwrite=overwrite, + new_version=new_version, + **kwargs, + ) + async def get_workflow_def( - self, name: str, version: Optional[int] = None, metadata: Optional[bool] = None + self, name: str, version: Optional[int] = None, metadata: Optional[bool] = None, **kwargs ) -> WorkflowDefAdapter: - """Get a workflow definition by name and version""" - return await self.metadata_api.get(name, version=version, metadata=metadata) + """Get a workflow definition by name and version. + + Args: + name: Name of the workflow definition + version: Optional version number. If None, returns the latest version + metadata: If True, include metadata in the response + **kwargs: Additional optional parameters to pass to the API + + Returns: + WorkflowDefAdapter instance containing the workflow definition + + Example: + ```python + # Get latest version + workflow = await metadata_client.get_workflow_def("my_workflow") + + # Get specific version + workflow_v2 = await metadata_client.get_workflow_def("my_workflow", version=2) + + # Get with metadata + workflow_meta = await metadata_client.get_workflow_def("my_workflow", metadata=True) + ``` + """ + return await self._metadata_api.get(name=name, version=version, metadata=metadata, **kwargs) async def get_workflow_defs( self, @@ -86,135 +411,724 @@ async def get_workflow_defs( tag_value: Optional[str] = None, name: Optional[str] = None, short: Optional[bool] = None, + **kwargs, ) -> List[WorkflowDefAdapter]: - """Get all workflow definitions with optional filtering""" - return await self.metadata_api.get_workflow_defs( + """Get all workflow definitions with optional filtering. + + Args: + access: Filter by access level (e.g., "READ", "EXECUTE") + metadata: If True, include metadata in the response + tag_key: Filter by tag key + tag_value: Filter by tag value (requires tag_key) + name: Filter by workflow name (returns all versions of that workflow) + short: If True, return short format without task details + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of WorkflowDefAdapter instances + + Example: + ```python + # Get all workflows + all_workflows = await metadata_client.get_workflow_defs() + + # Get workflows by tag + prod_workflows = await metadata_client.get_workflow_defs( + tag_key="environment", + tag_value="production" + ) + + # Get short format (faster) + workflows_short = await metadata_client.get_workflow_defs(short=True) + ``` + """ + return await self._metadata_api.get_workflow_defs( access=access, metadata=metadata, tag_key=tag_key, tag_value=tag_value, name=name, short=short, + **kwargs, ) - async def unregister_workflow_def(self, name: str, version: int) -> None: - """Unregister a workflow definition""" - await self.metadata_api.unregister_workflow_def(name, version) + async def unregister_workflow_def(self, name: str, version: int, **kwargs) -> None: + """Unregister (delete) a workflow definition. 
+ + Args: + name: Name of the workflow definition + version: Version number to unregister + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + await metadata_client.unregister_workflow_def("my_workflow", version=1) + ``` + """ + await self._metadata_api.unregister_workflow_def(name=name, version=version, **kwargs) # Bulk Operations - async def upload_definitions_to_s3(self) -> None: - """Upload all workflows and tasks definitions to Object storage if configured""" - await self.metadata_api.upload_workflows_and_tasks_definitions_to_s3() + async def upload_definitions_to_s3(self, **kwargs) -> None: + """Upload all workflow and task definitions to object storage. + + Backs up all metadata definitions to configured S3-compatible storage. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + await metadata_client.upload_definitions_to_s3() + ``` + """ + await self._metadata_api.upload_workflows_and_tasks_definitions_to_s3(**kwargs) # Convenience Methods - async def get_latest_workflow_def(self, name: str) -> WorkflowDefAdapter: - """Get the latest version of a workflow definition""" - return await self.get_workflow_def(name) + async def get_latest_workflow_def(self, name: str, **kwargs) -> WorkflowDefAdapter: + """Get the latest version of a workflow definition. + + Args: + name: Name of the workflow + **kwargs: Additional optional parameters to pass to the API + + Returns: + WorkflowDefAdapter instance for the latest version + + Example: + ```python + latest = await metadata_client.get_latest_workflow_def("my_workflow") + print(f"Latest version: {latest.version}") + ``` + """ + return await self.get_workflow_def(name=name, **kwargs) async def get_workflow_def_with_metadata( - self, name: str, version: Optional[int] = None + self, name: str, version: Optional[int] = None, **kwargs ) -> WorkflowDefAdapter: - """Get workflow definition with metadata included""" - return await self.get_workflow_def(name, version=version, metadata=True) + """Get workflow definition with metadata included. + + Args: + name: Name of the workflow + version: Optional version number + **kwargs: Additional optional parameters to pass to the API + + Returns: + WorkflowDefAdapter instance with metadata + + Example: + ```python + workflow = await metadata_client.get_workflow_def_with_metadata("my_workflow") + ``` + """ + return await self.get_workflow_def(name=name, version=version, metadata=True, **kwargs) + + async def get_all_task_defs(self, **kwargs) -> List[TaskDefAdapter]: + """Get all task definitions. - async def get_all_task_defs(self) -> List[TaskDefAdapter]: - """Get all task definitions""" - return await self.get_task_defs() + Args: + **kwargs: Additional optional parameters to pass to the API - async def get_all_workflow_defs(self) -> List[WorkflowDefAdapter]: - """Get all workflow definitions""" - return await self.get_workflow_defs() + Returns: + List of all TaskDefAdapter instances - async def get_task_defs_by_tag(self, tag_key: str, tag_value: str) -> List[TaskDefAdapter]: - """Get task definitions filtered by tag""" - return await self.get_task_defs(tag_key=tag_key, tag_value=tag_value) + Example: + ```python + tasks = await metadata_client.get_all_task_defs() + print(f"Total tasks: {len(tasks)}") + ``` + """ + return await self.get_task_defs(**kwargs) + + async def get_all_workflow_defs(self, **kwargs) -> List[WorkflowDefAdapter]: + """Get all workflow definitions. 
+ + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of all WorkflowDefAdapter instances + + Example: + ```python + workflows = await metadata_client.get_all_workflow_defs() + print(f"Total workflows: {len(workflows)}") + ``` + """ + return await self.get_workflow_defs(**kwargs) + + async def get_task_defs_by_tag( + self, tag_key: str, tag_value: str, **kwargs + ) -> List[TaskDefAdapter]: + """Get task definitions filtered by tag. + + Args: + tag_key: Tag key to filter by + tag_value: Tag value to filter by + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of TaskDefAdapter instances with matching tag + + Example: + ```python + tasks = await metadata_client.get_task_defs_by_tag("environment", "production") + ``` + """ + return await self.get_task_defs(tag_key=tag_key, tag_value=tag_value, **kwargs) async def get_workflow_defs_by_tag( - self, tag_key: str, tag_value: str + self, tag_key: str, tag_value: str, **kwargs ) -> List[WorkflowDefAdapter]: - """Get workflow definitions filtered by tag""" - return await self.get_workflow_defs(tag_key=tag_key, tag_value=tag_value) + """Get workflow definitions filtered by tag. + + Args: + tag_key: Tag key to filter by + tag_value: Tag value to filter by + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of WorkflowDefAdapter instances with matching tag + + Example: + ```python + workflows = await metadata_client.get_workflow_defs_by_tag("team", "platform") + ``` + """ + return await self.get_workflow_defs(tag_key=tag_key, tag_value=tag_value, **kwargs) - async def get_task_defs_with_metadata(self) -> List[TaskDefAdapter]: - """Get all task definitions with metadata""" - return await self.get_task_defs(metadata=True) + async def get_task_defs_with_metadata(self, **kwargs) -> List[TaskDefAdapter]: + """Get all task definitions with metadata. - async def get_workflow_defs_with_metadata(self) -> List[WorkflowDefAdapter]: - """Get all workflow definitions with metadata""" - return await self.get_workflow_defs(metadata=True) + Args: + **kwargs: Additional optional parameters to pass to the API - async def get_workflow_defs_by_name(self, name: str) -> List[WorkflowDefAdapter]: - """Get all versions of a workflow definition by name""" - return await self.get_workflow_defs(name=name) + Returns: + List of TaskDefAdapter instances with metadata - async def get_workflow_defs_short(self) -> List[WorkflowDefAdapter]: - """Get workflow definitions in short format (without task details)""" - return await self.get_workflow_defs(short=True) + Example: + ```python + tasks = await metadata_client.get_task_defs_with_metadata() + ``` + """ + return await self.get_task_defs(metadata=True, **kwargs) + + async def get_workflow_defs_with_metadata(self, **kwargs) -> List[WorkflowDefAdapter]: + """Get all workflow definitions with metadata. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of WorkflowDefAdapter instances with metadata + + Example: + ```python + workflows = await metadata_client.get_workflow_defs_with_metadata() + ``` + """ + return await self.get_workflow_defs(metadata=True, **kwargs) + + async def get_workflow_defs_by_name(self, name: str, **kwargs) -> List[WorkflowDefAdapter]: + """Get all versions of a workflow definition by name. 
+ + Args: + name: Name of the workflow + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of WorkflowDefAdapter instances for all versions + + Example: + ```python + versions = await metadata_client.get_workflow_defs_by_name("my_workflow") + for v in versions: + print(f"Version {v.version}") + ``` + """ + return await self.get_workflow_defs(name=name, **kwargs) + + async def get_workflow_defs_short(self, **kwargs) -> List[WorkflowDefAdapter]: + """Get workflow definitions in short format (without task details). + + Faster than full format, useful for listing workflows. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of WorkflowDefAdapter instances in short format + + Example: + ```python + workflows = await metadata_client.get_workflow_defs_short() + ``` + """ + return await self.get_workflow_defs(short=True, **kwargs) # Access Control Methods - async def get_task_defs_by_access(self, access: str) -> List[TaskDefAdapter]: - """Get task definitions filtered by access level""" - return await self.get_task_defs(access=access) + async def get_task_defs_by_access(self, access: str, **kwargs) -> List[TaskDefAdapter]: + """Get task definitions filtered by access level. - async def get_workflow_defs_by_access(self, access: str) -> List[WorkflowDefAdapter]: - """Get workflow definitions filtered by access level""" - return await self.get_workflow_defs(access=access) + Args: + access: Access level to filter by (e.g., "READ", "EXECUTE") + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of TaskDefAdapter instances with specified access level + + Example: + ```python + executable_tasks = await metadata_client.get_task_defs_by_access("EXECUTE") + ``` + """ + return await self.get_task_defs(access=access, **kwargs) + + async def get_workflow_defs_by_access(self, access: str, **kwargs) -> List[WorkflowDefAdapter]: + """Get workflow definitions filtered by access level. + + Args: + access: Access level to filter by (e.g., "READ", "EXECUTE") + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of WorkflowDefAdapter instances with specified access level + + Example: + ```python + readable_workflows = await metadata_client.get_workflow_defs_by_access("READ") + ``` + """ + return await self.get_workflow_defs(access=access, **kwargs) # Bulk Registration + @deprecated("register_workflow_def is deprecated; use register_workflow_def_validated instead") + @typing_deprecated( + "register_workflow_def is deprecated; use register_workflow_def_validated instead" + ) async def register_workflow_def( - self, extended_workflow_def: ExtendedWorkflowDefAdapter, overwrite: bool = False + self, + extended_workflow_def: ExtendedWorkflowDefAdapter, + overwrite: bool = False, ) -> object: - """Register a new workflow definition (alias for create_workflow_def)""" + """Register a new workflow definition (alias for create_workflow_def). + + .. deprecated:: + Use register_workflow_def_validated instead for type-safe validated responses. 
+ + Args: + extended_workflow_def: Workflow definition to register + overwrite: If True, overwrite existing definition + + Returns: + Raw response object from the API + + Example: + ```python + await metadata_client.register_workflow_def(workflow_def, overwrite=False) + ``` + """ return await self.create_workflow_def(extended_workflow_def, overwrite=overwrite) + async def register_workflow_def_validated( + self, extended_workflow_def: ExtendedWorkflowDefAdapter, overwrite: bool = False, **kwargs + ) -> None: + """Register a new workflow definition (alias for create_workflow_def_validated). + + Args: + extended_workflow_def: Workflow definition to register + overwrite: If True, overwrite existing definition + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + await metadata_client.register_workflow_def_validated(workflow_def) + ``` + """ + await self.create_workflow_def_validated( + extended_workflow_def=extended_workflow_def, overwrite=overwrite, **kwargs + ) + + @deprecated("update_workflow_def is deprecated; use update_workflow_def_validated instead") + @typing_deprecated( + "update_workflow_def is deprecated; use update_workflow_def_validated instead" + ) async def update_workflow_def( self, extended_workflow_def: ExtendedWorkflowDefAdapter, overwrite: bool = True ) -> object: - """Update a workflow definition (alias for create_workflow_def with overwrite)""" + """Update a workflow definition (alias for create_workflow_def with overwrite). + + .. deprecated:: + Use update_workflow_def_validated instead for type-safe validated responses. + + Args: + extended_workflow_def: Updated workflow definition + overwrite: If True, overwrite existing definition (default: True) + + Returns: + Raw response object from the API + + Example: + ```python + await metadata_client.update_workflow_def(workflow_def) + ``` + """ return await self.create_workflow_def(extended_workflow_def, overwrite=overwrite) + async def update_workflow_def_validated( + self, extended_workflow_def: ExtendedWorkflowDefAdapter, overwrite: bool = True, **kwargs + ) -> None: + """Update a workflow definition (alias for create_workflow_def_validated with overwrite). + + Args: + extended_workflow_def: Updated workflow definition + overwrite: If True, overwrite existing definition (default: True) + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + await metadata_client.update_workflow_def_validated(workflow_def) + ``` + """ + await self.create_workflow_def_validated( + extended_workflow_def=extended_workflow_def, overwrite=overwrite, **kwargs + ) + # Legacy compatibility methods - async def get_workflow_def_versions(self, name: str) -> List[int]: - """Get all version numbers for a workflow definition""" - workflow_defs = await self.get_workflow_defs_by_name(name) + async def get_workflow_def_versions(self, name: str, **kwargs) -> List[int]: + """Get all version numbers for a workflow definition. 
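+
+        Version numbers are collected from get_workflow_defs_by_name; definitions
+        without a version number are skipped.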
+ + Args: + name: Name of the workflow + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of version numbers + + Example: + ```python + versions = await metadata_client.get_workflow_def_versions("my_workflow") + print(f"Available versions: {versions}") # [1, 2, 3] + ``` + """ + workflow_defs = await self.get_workflow_defs_by_name(name=name, **kwargs) return [wd.version for wd in workflow_defs if wd.version is not None] - async def get_workflow_def_latest_version(self, name: str) -> WorkflowDefAdapter: - """Get the latest version workflow definition""" - return await self.get_latest_workflow_def(name) + async def get_workflow_def_latest_version(self, name: str, **kwargs) -> WorkflowDefAdapter: + """Get the latest version workflow definition (alias for get_latest_workflow_def). + + Args: + name: Name of the workflow + **kwargs: Additional optional parameters to pass to the API + + Returns: + WorkflowDefAdapter instance for the latest version + + Example: + ```python + latest = await metadata_client.get_workflow_def_latest_version("my_workflow") + ``` + """ + return await self.get_latest_workflow_def(name=name, **kwargs) + + async def get_workflow_def_latest_versions(self, **kwargs) -> List[WorkflowDefAdapter]: + """Get the latest version of all workflow definitions. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of WorkflowDefAdapter instances (latest version of each workflow) + + Example: + ```python + all_latest = await metadata_client.get_workflow_def_latest_versions() + ``` + """ + return await self.get_all_workflow_defs(**kwargs) + + async def get_workflow_def_by_version( + self, name: str, version: int, **kwargs + ) -> WorkflowDefAdapter: + """Get workflow definition by name and specific version. + + Args: + name: Name of the workflow + version: Version number + **kwargs: Additional optional parameters to pass to the API + + Returns: + WorkflowDefAdapter instance for the specified version + + Example: + ```python + v2 = await metadata_client.get_workflow_def_by_version("my_workflow", 2) + ``` + """ + return await self.get_workflow_def(name=name, version=version, **kwargs) + + async def get_workflow_def_by_name(self, name: str, **kwargs) -> List[WorkflowDefAdapter]: + """Get all versions of workflow definition by name. + + Args: + name: Name of the workflow + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of WorkflowDefAdapter instances for all versions + + Example: + ```python + all_versions = await metadata_client.get_workflow_def_by_name("my_workflow") + ``` + """ + return await self.get_workflow_defs_by_name(name=name, **kwargs) + + async def add_workflow_tag(self, tag: TagAdapter, workflow_name: str, **kwargs) -> None: + """Add a tag to a workflow definition. + + Args: + tag: Tag to add + workflow_name: Name of the workflow + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter + + tag = TagAdapter(key="environment", value="production") + await metadata_client.add_workflow_tag(tag, "my_workflow") + ``` + """ + await self._tags_api.add_workflow_tag(name=workflow_name, tag=tag, **kwargs) + + async def delete_workflow_tag(self, tag: TagAdapter, workflow_name: str, **kwargs) -> None: + """Delete a tag from a workflow definition. 
+ + Args: + tag: Tag to delete + workflow_name: Name of the workflow + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter + + tag = TagAdapter(key="environment", value="production") + await metadata_client.delete_workflow_tag(tag, "my_workflow") + ``` + """ + await self._tags_api.delete_workflow_tag(name=workflow_name, tag=tag, **kwargs) + + async def get_workflow_tags(self, workflow_name: str, **kwargs) -> List[TagAdapter]: + """Get all tags for a workflow definition. + + Args: + workflow_name: Name of the workflow + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of TagAdapter instances + + Example: + ```python + tags = await metadata_client.get_workflow_tags("my_workflow") + for tag in tags: + print(f"{tag.key}: {tag.value}") + ``` + """ + return await self._tags_api.get_workflow_tags(name=workflow_name, **kwargs) + + async def set_workflow_tags(self, tags: List[TagAdapter], workflow_name: str, **kwargs) -> None: + """Set tags for a workflow definition (replaces existing tags). + + Args: + tags: List of tags to set + workflow_name: Name of the workflow + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter + + tags = [ + TagAdapter(key="environment", value="production"), + TagAdapter(key="team", value="platform") + ] + await metadata_client.set_workflow_tags(tags, "my_workflow") + ``` + """ + await self._tags_api.set_workflow_tags(name=workflow_name, tag=tags, **kwargs) + + async def add_task_tag(self, tag: TagAdapter, task_name: str, **kwargs) -> None: + """Add a tag to a task definition. + + Args: + tag: Tag to add + task_name: Name of the task + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter + + tag = TagAdapter(key="category", value="data-processing") + await metadata_client.add_task_tag(tag, "my_task") + ``` + """ + await self._tags_api.add_task_tag(task_name=task_name, tag=tag, **kwargs) + + async def delete_task_tag(self, tag: TagAdapter, task_name: str, **kwargs) -> None: + """Delete a tag from a task definition. + + Args: + tag: Tag to delete + task_name: Name of the task + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter + + tag = TagAdapter(key="category", value="data-processing") + await metadata_client.delete_task_tag(tag, "my_task") + ``` + """ + await self._tags_api.delete_task_tag(task_name=task_name, tag=tag, **kwargs) + + async def get_task_tags(self, task_name: str, **kwargs) -> List[TagAdapter]: + """Get all tags for a task definition. + + Args: + task_name: Name of the task + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of TagAdapter instances + + Example: + ```python + tags = await metadata_client.get_task_tags("my_task") + for tag in tags: + print(f"{tag.key}: {tag.value}") + ``` + """ + return await self._tags_api.get_task_tags(task_name=task_name, **kwargs) + + async def set_task_tags(self, tags: List[TagAdapter], task_name: str, **kwargs) -> None: + """Set tags for a task definition (replaces existing tags). 
+ + Args: + tags: List of tags to set + task_name: Name of the task + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter + + tags = [ + TagAdapter(key="category", value="data-processing"), + TagAdapter(key="priority", value="high") + ] + await metadata_client.set_task_tags(tags, "my_task") + ``` + """ + await self._tags_api.set_task_tags(task_name=task_name, tag=tags, **kwargs) + + async def set_workflow_rate_limit(self, rate_limit: str, workflow_name: str) -> None: + """Set rate limit for a workflow. + + Rate limits control how many instances of a workflow can execute concurrently. + + Args: + rate_limit: Rate limit value (e.g., "10" for max 10 concurrent executions) + workflow_name: Name of the workflow - async def get_workflow_def_latest_versions(self) -> List[WorkflowDefAdapter]: - """Get the latest version of all workflow definitions""" - return await self.get_all_workflow_defs() + Returns: + None - async def get_workflow_def_by_version(self, name: str, version: int) -> WorkflowDefAdapter: - """Get workflow definition by name and specific version""" - return await self.get_workflow_def(name, version=version) + Example: + ```python + # Limit to 5 concurrent executions + await metadata_client.set_workflow_rate_limit("5", "my_workflow") + ``` + """ + await self.remove_workflow_rate_limit(workflow_name=workflow_name) + rate_limit_tag = TagAdapter(key=workflow_name, type="RATE_LIMIT", value=rate_limit) + await self._tags_api.add_workflow_tag(name=workflow_name, tag=rate_limit_tag) - async def get_workflow_def_by_name(self, name: str) -> List[WorkflowDefAdapter]: - """Get all versions of workflow definition by name""" - return await self.get_workflow_defs_by_name(name) + async def get_workflow_rate_limit(self, workflow_name: str) -> Optional[str]: + """Get rate limit for a workflow. - async def add_workflow_tag(self, tag: TagAdapter, workflow_name: str): - await self.tags_api.add_workflow_tag(workflow_name, tag) + Args: + workflow_name: Name of the workflow - async def delete_workflow_tag(self, tag: TagAdapter, workflow_name: str): - await self.tags_api.delete_workflow_tag(workflow_name, tag) + Returns: + Rate limit value as string, or None if no rate limit is set - async def get_workflow_tags(self, workflow_name: str) -> List[TagAdapter]: - return await self.tags_api.get_workflow_tags(workflow_name) + Example: + ```python + limit = await metadata_client.get_workflow_rate_limit("my_workflow") + if limit: + print(f"Rate limit: {limit} concurrent executions") + else: + print("No rate limit set") + ``` + """ + tags = await self._tags_api.get_workflow_tags(name=workflow_name) + for tag in tags: + if tag.type == "RATE_LIMIT" and tag.key == workflow_name: + return tag.value - async def set_workflow_tags(self, tags: List[TagAdapter], workflow_name: str): - await self.tags_api.set_workflow_tags(workflow_name, tags) + return None - async def add_task_tag(self, tag: TagAdapter, task_name: str): - await self.tags_api.add_task_tag(task_name, tag) + async def remove_workflow_rate_limit(self, workflow_name: str) -> None: + """Remove rate limit from a workflow. 
- async def delete_task_tag(self, tag: TagAdapter, task_name: str): - await self.tags_api.delete_task_tag(task_name, tag) + Args: + workflow_name: Name of the workflow - async def get_task_tags(self, task_name: str) -> List[TagAdapter]: - return await self.tags_api.get_task_tags(task_name) + Returns: + None - async def set_task_tags(self, tags: List[TagAdapter], task_name: str): - await self.tags_api.set_task_tags(task_name, tags) + Example: + ```python + await metadata_client.remove_workflow_rate_limit("my_workflow") + ``` + """ + current_rate_limit = await self.get_workflow_rate_limit(workflow_name=workflow_name) + if current_rate_limit: + rate_limit_tag = TagAdapter( + key=workflow_name, type="RATE_LIMIT", value=current_rate_limit + ) + await self._tags_api.delete_workflow_tag(name=workflow_name, tag=rate_limit_tag) diff --git a/src/conductor/asyncio_client/orkes/orkes_prompt_client.py b/src/conductor/asyncio_client/orkes/orkes_prompt_client.py index c103aaaa8..e0b76f243 100644 --- a/src/conductor/asyncio_client/orkes/orkes_prompt_client.py +++ b/src/conductor/asyncio_client/orkes/orkes_prompt_client.py @@ -2,6 +2,9 @@ from typing import List, Optional +from deprecated import deprecated +from typing_extensions import deprecated as typing_deprecated + from conductor.asyncio_client.adapters import ApiClient from conductor.asyncio_client.adapters.models.message_template_adapter import ( MessageTemplateAdapter, @@ -16,57 +19,309 @@ class OrkesPromptClient(OrkesBaseClient): def __init__(self, configuration: Configuration, api_client: ApiClient): + """Initialize the OrkesPromptClient with configuration and API client. + + Args: + configuration: Configuration object containing server settings and authentication + api_client: ApiClient instance for making API requests + + Example: + ```python + from conductor.asyncio_client.configuration.configuration import Configuration + from conductor.asyncio_client.adapters import ApiClient + + config = Configuration(server_api_url="http://localhost:8080/api") + api_client = ApiClient(configuration=config) + prompt_client = OrkesPromptClient(config, api_client) + ``` + """ super().__init__(configuration, api_client) # Message Template Operations async def save_message_template( - self, name: str, description: str, body: str, models: Optional[List[str]] = None + self, name: str, description: str, body: str, models: Optional[List[str]] = None, **kwargs ) -> None: - """Create or update a message template""" - await self.prompt_api.save_message_template(name, description, body, models=models) + """Create or update a message template. + + Message templates are reusable prompt templates for AI integrations with + variable substitution support. + + Args: + name: Unique name for the template + description: Human-readable description of the template's purpose + body: Template text with variables in ${variable} format + models: Optional list of AI models this template is compatible with + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + # Create a customer service template + await prompt_client.save_message_template( + name="customer_greeting", + description="Greeting template for customer service", + body="Hello ${customer_name}, welcome to ${company}! 
How can I help you today?", + models=["gpt-4", "gpt-3.5-turbo"] + ) + ``` + """ + await self._prompt_api.save_message_template( + name=name, description=description, body=body, models=models, **kwargs + ) + + async def get_message_template(self, name: str, **kwargs) -> MessageTemplateAdapter: + """Get a message template by name. + + Args: + name: Name of the template to retrieve + **kwargs: Additional optional parameters to pass to the API + + Returns: + MessageTemplateAdapter instance containing the template details + + Example: + ```python + template = await prompt_client.get_message_template("customer_greeting") + print(f"Template: {template.template}") + print(f"Models: {template.models}") + ``` + """ + return await self._prompt_api.get_message_template(name=name, **kwargs) + + async def get_message_templates(self, **kwargs) -> List[MessageTemplateAdapter]: + """Get all message templates. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of MessageTemplateAdapter instances + + Example: + ```python + templates = await prompt_client.get_message_templates() + for template in templates: + print(f"Template: {template.name} - {template.description}") + ``` + """ + return await self._prompt_api.get_message_templates(**kwargs) + + async def delete_message_template(self, name: str, **kwargs) -> None: + """Delete a message template. - async def get_message_template(self, name: str) -> MessageTemplateAdapter: - """Get a message template by name""" - return await self.prompt_api.get_message_template(name) + Args: + name: Name of the template to delete + **kwargs: Additional optional parameters to pass to the API - async def get_message_templates(self) -> List[MessageTemplateAdapter]: - """Get all message templates""" - return await self.prompt_api.get_message_templates() + Returns: + None - async def delete_message_template(self, name: str) -> None: - """Delete a message template""" - await self.prompt_api.delete_message_template(name) + Example: + ```python + await prompt_client.delete_message_template("old_template") + ``` + """ + await self._prompt_api.delete_message_template(name=name, **kwargs) async def create_message_templates( - self, message_templates: List[MessageTemplateAdapter] + self, message_templates: List[MessageTemplateAdapter], **kwargs ) -> None: - """Create multiple message templates in bulk""" - await self.prompt_api.create_message_templates(message_templates) + """Create multiple message templates in bulk. + + Args: + message_templates: List of template objects to create + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.message_template_adapter import MessageTemplateAdapter + + templates = [ + MessageTemplateAdapter( + name="greeting", + description="Greeting template", + template="Hello ${name}!" + ), + MessageTemplateAdapter( + name="farewell", + description="Farewell template", + template="Goodbye ${name}!" 
+ ) + ] + await prompt_client.create_message_templates(templates) + ``` + """ + await self._prompt_api.create_message_templates( + message_template=message_templates, **kwargs + ) # Template Testing async def test_message_template( - self, prompt_template_test_request: PromptTemplateTestRequestAdapter + self, prompt_template_test_request: PromptTemplateTestRequestAdapter, **kwargs ) -> str: - """Test a prompt template with provided inputs""" - return await self.prompt_api.test_message_template(prompt_template_test_request) + """Test a prompt template with provided inputs. + + Tests how a template will be rendered with specific variables and AI model settings. + + Args: + prompt_template_test_request: Test request containing template, variables, and model config + **kwargs: Additional optional parameters to pass to the API + + Returns: + String containing the rendered/tested prompt result + + Example: + ```python + from conductor.asyncio_client.adapters.models.prompt_template_test_request_adapter import PromptTemplateTestRequestAdapter + + test_request = PromptTemplateTestRequestAdapter( + prompt="Hello ${name}, you have ${count} messages", + prompt_variables={"name": "John", "count": "5"}, + llm_provider="openai", + model="gpt-4", + temperature=0.7 + ) + result = await prompt_client.test_message_template(test_request) + print(f"Rendered template: {result}") + ``` + """ + return await self._prompt_api.test_message_template( + prompt_template_test_request=prompt_template_test_request, **kwargs + ) # Tag Management for Prompt Templates - async def put_tag_for_prompt_template(self, name: str, tags: List[TagAdapter]) -> None: - """Add tags to a prompt template""" - await self.prompt_api.put_tag_for_prompt_template(name, tags) + @deprecated( + "put_tag_for_prompt_template is deprecated; use update_tag_for_prompt_template instead" + ) + @typing_deprecated( + "put_tag_for_prompt_template is deprecated; use update_tag_for_prompt_template instead" + ) + async def put_tag_for_prompt_template( + self, name: str, tags: List[TagAdapter], **kwargs + ) -> None: + """Add tags to a prompt template. + + .. deprecated:: + Use update_tag_for_prompt_template instead. + + Args: + name: Name of the template + tags: List of tags to add + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter + + tags = [TagAdapter(key="category", value="customer-service")] + await prompt_client.put_tag_for_prompt_template("greeting", tags) + ``` + """ + await self._prompt_api.put_tag_for_prompt_template(name, tags, **kwargs) + + async def get_tags_for_prompt_template(self, name: str, **kwargs) -> List[TagAdapter]: + """Get tags associated with a prompt template. + + Args: + name: Name of the template + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of TagAdapter instances + + Example: + ```python + tags = await prompt_client.get_tags_for_prompt_template("customer_greeting") + for tag in tags: + print(f"{tag.key}: {tag.value}") + ``` + """ + return await self._prompt_api.get_tags_for_prompt_template(name=name, **kwargs) + + async def update_tag_for_prompt_template( + self, prompt_name: str, tags: List[TagAdapter], **kwargs + ) -> None: + """Update tags for a prompt template. 
+ + Args: + prompt_name: Name of the template + tags: List of tags to set + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter + + tags = [ + TagAdapter(key="category", value="customer-service"), + TagAdapter(key="version", value="v2") + ] + await prompt_client.update_tag_for_prompt_template("greeting", tags) + ``` + """ + await self._prompt_api.put_tag_for_prompt_template(name=prompt_name, tag=tags, **kwargs) + + async def delete_tag_for_prompt_template( + self, name: str, tags: List[TagAdapter], **kwargs + ) -> None: + """Delete tags from a prompt template. + + Args: + name: Name of the template + tags: List of tags to delete + **kwargs: Additional optional parameters to pass to the API - async def get_tags_for_prompt_template(self, name: str) -> List[TagAdapter]: - """Get tags associated with a prompt template""" - return await self.prompt_api.get_tags_for_prompt_template(name) + Returns: + None - async def delete_tag_for_prompt_template(self, name: str, tags: List[TagAdapter]) -> None: - """Delete tags from a prompt template""" - await self.prompt_api.delete_tag_for_prompt_template(name, tags) + Example: + ```python + from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter + + tags = [TagAdapter(key="category", value="customer-service")] + await prompt_client.delete_tag_for_prompt_template("greeting", tags) + ``` + """ + await self._prompt_api.delete_tag_for_prompt_template(name=name, tag=tags, **kwargs) # Convenience Methods - async def create_simple_template(self, name: str, description: str, template_body: str) -> None: - """Create a simple message template with basic parameters""" - await self.save_message_template(name, description, template_body) + async def create_simple_template( + self, name: str, description: str, template_body: str, **kwargs + ) -> None: + """Create a simple message template with basic parameters. + + Convenience method for creating templates without specifying models. + + Args: + name: Unique name for the template + description: Description of the template + template_body: Template text with variables + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + await prompt_client.create_simple_template( + "simple_greeting", + "Basic greeting", + "Hello ${name}!" + ) + ``` + """ + await self.save_message_template(name, description, template_body, **kwargs) async def update_template( self, @@ -74,23 +329,81 @@ async def update_template( description: str, template_body: str, models: Optional[List[str]] = None, + **kwargs, ) -> None: - """Update an existing message template (alias for save_message_template)""" - await self.save_message_template(name, description, template_body, models) - - async def template_exists(self, name: str) -> bool: - """Check if a message template exists""" + """Update an existing message template (alias for save_message_template). 
+ + Args: + name: Name of the template to update + description: Updated description + template_body: Updated template text + models: Optional list of compatible AI models + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + await prompt_client.update_template( + "customer_greeting", + "Updated greeting template", + "Hi ${customer_name}, welcome back!", + models=["gpt-4"] + ) + ``` + """ + await self.save_message_template(name, description, template_body, models, **kwargs) + + async def template_exists(self, name: str, **kwargs) -> bool: + """Check if a message template exists. + + Args: + name: Name of the template to check + **kwargs: Additional optional parameters to pass to the API + + Returns: + True if template exists, False otherwise + + Example: + ```python + if await prompt_client.template_exists("customer_greeting"): + print("Template exists") + else: + print("Template not found") + ``` + """ try: - await self.get_message_template(name) + await self.get_message_template(name, **kwargs) return True except Exception: return False async def get_templates_by_tag( - self, tag_key: str, tag_value: str + self, tag_key: str, tag_value: str, **kwargs ) -> List[MessageTemplateAdapter]: - """Get all templates that have a specific tag (requires filtering on client side)""" - all_templates = await self.get_message_templates() + """Get all templates that have a specific tag. + + Note: This method fetches all templates and filters client-side. + + Args: + tag_key: Tag key to filter by + tag_value: Tag value to filter by + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of MessageTemplateAdapter instances with matching tag + + Example: + ```python + # Get all customer service templates + templates = await prompt_client.get_templates_by_tag( + "category", + "customer-service" + ) + ``` + """ + all_templates = await self.get_message_templates(**kwargs) matching_templates = [] for template in all_templates: @@ -106,62 +419,223 @@ async def get_templates_by_tag( return matching_templates async def clone_template( - self, source_name: str, target_name: str, new_description: Optional[str] = None + self, source_name: str, target_name: str, new_description: Optional[str] = None, **kwargs ) -> None: - """Clone an existing template with a new name""" - source_template = await self.get_message_template(source_name) + """Clone an existing template with a new name. + + Args: + source_name: Name of the template to clone + target_name: Name for the cloned template + new_description: Optional description for the clone. 
If None, uses "Clone of {original}" + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + # Clone a template + await prompt_client.clone_template( + "customer_greeting", + "partner_greeting", + "Greeting template for partners" + ) + ``` + """ + source_template = await self.get_message_template(source_name, **kwargs) description = new_description or f"Clone of {source_template.description}" await self.save_message_template( - target_name, - description, - source_template.template or "", + name=target_name, + description=description, + body=source_template.template or "", models=(source_template.models if hasattr(source_template, "models") else None), + **kwargs, ) - async def bulk_delete_templates(self, template_names: List[str]) -> None: - """Delete multiple templates in bulk""" + async def bulk_delete_templates(self, template_names: List[str], **kwargs) -> None: + """Delete multiple templates in bulk. + + Continues deleting even if some deletions fail. + + Args: + template_names: List of template names to delete + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + templates_to_delete = ["old_template1", "old_template2", "old_template3"] + await prompt_client.bulk_delete_templates(templates_to_delete) + ``` + """ for name in template_names: try: - await self.delete_message_template(name) + await self.delete_message_template(name=name, **kwargs) except Exception: # noqa: PERF203 continue # Legacy compatibility methods (aliasing new method names to match the original draft) - async def save_prompt(self, name: str, description: str, prompt_template: str) -> None: - """Legacy method: Create or update a message template""" - await self.save_message_template(name, description, prompt_template) + async def save_prompt( + self, name: str, description: str, prompt_template: str, **kwargs + ) -> None: + """Create or update a message template (legacy alias). + + Args: + name: Name of the template + description: Description of the template + prompt_template: Template text + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + await prompt_client.save_prompt( + "greeting", + "Customer greeting", + "Hello ${customer}!" + ) + ``` + """ + await self.save_message_template( + name=name, description=description, body=prompt_template, **kwargs + ) + + async def get_prompt(self, name: str, **kwargs) -> MessageTemplateAdapter: + """Get a message template by name (legacy alias). + + Args: + name: Name of the template + **kwargs: Additional optional parameters to pass to the API + + Returns: + MessageTemplateAdapter instance + + Example: + ```python + prompt = await prompt_client.get_prompt("greeting") + ``` + """ + return await self.get_message_template(name=name, **kwargs) + + async def get_prompts(self, **kwargs) -> List[MessageTemplateAdapter]: + """Get all message templates (legacy alias). 
+ + Args: + **kwargs: Additional optional parameters to pass to the API - async def get_prompt(self, name: str) -> MessageTemplateAdapter: - """Legacy method: Get a message template by name""" - return await self.get_message_template(name) + Returns: + List of MessageTemplateAdapter instances - async def delete_prompt(self, name: str) -> None: - """Legacy method: Delete a message template""" - await self.delete_message_template(name) + Example: + ```python + prompts = await prompt_client.get_prompts() + ``` + """ + return await self._prompt_api.get_message_templates(**kwargs) - async def list_prompts(self) -> List[MessageTemplateAdapter]: - """Legacy method: Get all message templates""" - return await self.get_message_templates() + async def delete_prompt(self, name: str, **kwargs) -> None: + """Delete a message template (legacy alias). + + Args: + name: Name of the template to delete + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + await prompt_client.delete_prompt("old_template") + ``` + """ + await self.delete_message_template(name=name, **kwargs) + + async def list_prompts(self, **kwargs) -> List[MessageTemplateAdapter]: + """Get all message templates (legacy alias). + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of MessageTemplateAdapter instances + + Example: + ```python + prompts = await prompt_client.list_prompts() + ``` + """ + return await self.get_message_templates(**kwargs) # Template Management Utilities - async def get_template_count(self) -> int: - """Get the total number of message templates""" - templates = await self.get_message_templates() + async def get_template_count(self, **kwargs) -> int: + """Get the total number of message templates. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + Total count of templates as integer + + Example: + ```python + count = await prompt_client.get_template_count() + print(f"Total templates: {count}") + ``` + """ + templates = await self.get_message_templates(**kwargs) return len(templates) - async def search_templates_by_name(self, name_pattern: str) -> List[MessageTemplateAdapter]: - """Search templates by name pattern (case-insensitive)""" - all_templates = await self.get_message_templates() + async def search_templates_by_name( + self, name_pattern: str, **kwargs + ) -> List[MessageTemplateAdapter]: + """Search templates by name pattern (case-insensitive). + + Args: + name_pattern: Pattern to search for in template names + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of MessageTemplateAdapter instances matching the pattern + + Example: + ```python + # Find all templates with "greeting" in the name + templates = await prompt_client.search_templates_by_name("greeting") + for t in templates: + print(f"Found: {t.name}") + ``` + """ + all_templates = await self.get_message_templates(**kwargs) return [ template for template in all_templates if template.name and name_pattern.lower() in template.name.lower() ] - async def get_templates_with_model(self, model_name: str) -> List[MessageTemplateAdapter]: - """Get templates that use a specific AI model""" - all_templates = await self.get_message_templates() + async def get_templates_with_model( + self, model_name: str, **kwargs + ) -> List[MessageTemplateAdapter]: + """Get templates that use a specific AI model. 
+ + Args: + model_name: Name of the AI model (e.g., "gpt-4", "gpt-3.5-turbo") + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of MessageTemplateAdapter instances compatible with the model + + Example: + ```python + # Get all templates compatible with GPT-4 + gpt4_templates = await prompt_client.get_templates_with_model("gpt-4") + ``` + """ + all_templates = await self.get_message_templates(**kwargs) matching_templates = [] matching_templates = [ @@ -181,7 +655,42 @@ async def test_prompt( temperature: float = 0.1, top_p: float = 0.9, stop_words: Optional[List[str]] = None, + **kwargs, ) -> str: + """Test a prompt with variables and AI model configuration. + + Convenience method that constructs a test request and executes it. + + Args: + prompt_text: The prompt template text with variables + variables: Dictionary of variable values to substitute + ai_integration: Name of the AI integration provider + text_complete_model: Name of the AI model to use + temperature: Sampling temperature (0.0 to 1.0). Lower is more deterministic. Defaults to 0.1 + top_p: Nucleus sampling parameter. Defaults to 0.9 + stop_words: Optional list of stop words/sequences + **kwargs: Additional optional parameters to pass to the API + + Returns: + String containing the AI model's response + + Example: + ```python + # Test a prompt with GPT-4 + result = await prompt_client.test_prompt( + prompt_text="Summarize this for ${audience}: ${text}", + variables={ + "audience": "executives", + "text": "Long technical document..." + }, + ai_integration="my-openai", + text_complete_model="gpt-4", + temperature=0.3, + top_p=0.95 + ) + print(f"AI Response: {result}") + ``` + """ request = PromptTemplateTestRequestAdapter( prompt=prompt_text, llm_provider=ai_integration, @@ -191,4 +700,6 @@ async def test_prompt( stop_words=stop_words, top_p=top_p, ) - return await self.prompt_api.test_message_template(request) + return await self._prompt_api.test_message_template( + prompt_template_test_request=request, **kwargs + ) diff --git a/src/conductor/asyncio_client/orkes/orkes_scheduler_client.py b/src/conductor/asyncio_client/orkes/orkes_scheduler_client.py index 88d99d30f..3b840f8ca 100644 --- a/src/conductor/asyncio_client/orkes/orkes_scheduler_client.py +++ b/src/conductor/asyncio_client/orkes/orkes_scheduler_client.py @@ -2,6 +2,9 @@ from typing import Dict, List, Optional +from deprecated import deprecated +from typing_extensions import deprecated as typing_deprecated + from conductor.asyncio_client.adapters import ApiClient from conductor.asyncio_client.adapters.models.save_schedule_request_adapter import ( SaveScheduleRequestAdapter, @@ -25,45 +28,328 @@ class OrkesSchedulerClient(OrkesBaseClient): def __init__(self, configuration: Configuration, api_client: ApiClient): + """Initialize the OrkesSchedulerClient with configuration and API client. 
+ + Args: + configuration: Configuration object containing server settings and authentication + api_client: ApiClient instance for making API requests + + Example: + ```python + from conductor.asyncio_client.configuration.configuration import Configuration + from conductor.asyncio_client.adapters import ApiClient + + config = Configuration(server_api_url="http://localhost:8080/api") + api_client = ApiClient(configuration=config) + scheduler_client = OrkesSchedulerClient(config, api_client) + ``` + """ super().__init__(configuration, api_client) # Core Schedule Operations + @deprecated("save_schedule is deprecated; use save_schedule_validated instead") + @typing_deprecated("save_schedule is deprecated; use save_schedule_validated instead") async def save_schedule(self, save_schedule_request: SaveScheduleRequestAdapter) -> object: - """Create or update a schedule for a specified workflow""" - return await self.scheduler_api.save_schedule(save_schedule_request) + """Create or update a schedule for a specified workflow. + + .. deprecated:: + Use save_schedule_validated instead for type-safe validated responses. + + Args: + save_schedule_request: Complete schedule configuration + + Returns: + Raw response object from the API + + Example: + ```python + from conductor.asyncio_client.adapters.models.save_schedule_request_adapter import SaveScheduleRequestAdapter + + schedule = SaveScheduleRequestAdapter( + name="daily_report", + cron_expression="0 9 * * *", + start_workflow_request=start_request + ) + await scheduler_client.save_schedule(schedule) + ``` + """ + return await self._scheduler_api.save_schedule(save_schedule_request) + + async def save_schedule_validated( + self, save_schedule_request: SaveScheduleRequestAdapter, **kwargs + ) -> None: + """Create or update a schedule for a specified workflow. + + Args: + save_schedule_request: Complete schedule configuration + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.save_schedule_request_adapter import SaveScheduleRequestAdapter + from conductor.asyncio_client.adapters.models.start_workflow_request_adapter import StartWorkflowRequestAdapter + + start_request = StartWorkflowRequestAdapter( + name="daily_report_workflow", + input={"date": "today"} + ) + schedule = SaveScheduleRequestAdapter( + name="daily_report", + cron_expression="0 9 * * *", # Every day at 9 AM + start_workflow_request=start_request, + timezone="America/New_York" + ) + await scheduler_client.save_schedule_validated(schedule) + ``` + """ + await self._scheduler_api.save_schedule( + save_schedule_request=save_schedule_request, **kwargs + ) + + async def get_schedule(self, name: str, **kwargs) -> WorkflowScheduleAdapter: + """Get a workflow schedule by name. 
- async def get_schedule(self, name: str) -> WorkflowScheduleAdapter: - """Get a workflow schedule by name""" - return await self.scheduler_api.get_schedule(name) + Args: + name: Name of the schedule to retrieve + **kwargs: Additional optional parameters to pass to the API + Returns: + WorkflowScheduleAdapter instance containing the schedule details + + Example: + ```python + schedule = await scheduler_client.get_schedule("daily_report") + print(f"Cron: {schedule.cron_expression}") + print(f"Paused: {schedule.paused}") + ``` + """ + return await self._scheduler_api.get_schedule(name=name, **kwargs) + + @deprecated("delete_schedule is deprecated; use delete_schedule_validated instead") + @typing_deprecated("delete_schedule is deprecated; use delete_schedule_validated instead") async def delete_schedule(self, name: str) -> object: - """Delete an existing workflow schedule by name""" - return await self.scheduler_api.delete_schedule(name) + """Delete an existing workflow schedule by name. + + .. deprecated:: + Use delete_schedule_validated instead for type-safe validated responses. + + Args: + name: Name of the schedule to delete + + Returns: + Raw response object from the API + + Example: + ```python + await scheduler_client.delete_schedule("old_daily_report") + ``` + """ + return await self._scheduler_api.delete_schedule(name) + + async def delete_schedule_validated(self, name: str, **kwargs) -> None: + """Delete an existing workflow schedule by name. + + Args: + name: Name of the schedule to delete + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + await scheduler_client.delete_schedule_validated("old_daily_report") + ``` + """ + await self._scheduler_api.delete_schedule(name=name, **kwargs) async def get_all_schedules( - self, workflow_name: Optional[str] = None + self, workflow_name: Optional[str] = None, **kwargs ) -> List[WorkflowScheduleModelAdapter]: - """Get all workflow schedules, optionally filtered by workflow name""" - return await self.scheduler_api.get_all_schedules(workflow_name=workflow_name) + """Get all workflow schedules, optionally filtered by workflow name. + + Args: + workflow_name: Optional workflow name to filter schedules + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of WorkflowScheduleModelAdapter instances + + Example: + ```python + # Get all schedules + all_schedules = await scheduler_client.get_all_schedules() + + # Get schedules for a specific workflow + report_schedules = await scheduler_client.get_all_schedules("daily_report_workflow") + for schedule in report_schedules: + print(f"Schedule: {schedule.name}, Cron: {schedule.cron_expression}") + ``` + """ + return await self._scheduler_api.get_all_schedules(workflow_name=workflow_name, **kwargs) # Schedule Control Operations + @deprecated("pause_schedule is deprecated; use pause_schedule_validated instead") + @typing_deprecated("pause_schedule is deprecated; use pause_schedule_validated instead") async def pause_schedule(self, name: str) -> object: - """Pause a workflow schedule""" - return await self.scheduler_api.pause_schedule(name) + """Pause a workflow schedule. + + .. deprecated:: + Use pause_schedule_validated instead for type-safe validated responses. 
+ + Args: + name: Name of the schedule to pause + Returns: + Raw response object from the API + + Example: + ```python + await scheduler_client.pause_schedule("daily_report") + ``` + """ + return await self._scheduler_api.pause_schedule(name) + + async def pause_schedule_validated(self, name: str, **kwargs) -> None: + """Pause a workflow schedule. + + Args: + name: Name of the schedule to pause + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + # Pause a schedule temporarily + await scheduler_client.pause_schedule_validated("daily_report") + ``` + """ + await self._scheduler_api.pause_schedule(name=name, **kwargs) + + @deprecated("resume_schedule is deprecated; use resume_schedule_validated instead") + @typing_deprecated("resume_schedule is deprecated; use resume_schedule_validated instead") async def resume_schedule(self, name: str) -> object: - """Resume a paused workflow schedule""" - return await self.scheduler_api.resume_schedule(name) + """Resume a paused workflow schedule. + + .. deprecated:: + Use resume_schedule_validated instead for type-safe validated responses. + + Args: + name: Name of the schedule to resume + Returns: + Raw response object from the API + + Example: + ```python + await scheduler_client.resume_schedule("daily_report") + ``` + """ + return await self._scheduler_api.resume_schedule(name) + + async def resume_schedule_validated(self, name: str, **kwargs) -> None: + """Resume a paused workflow schedule. + + Args: + name: Name of the schedule to resume + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + # Resume a paused schedule + await scheduler_client.resume_schedule_validated("daily_report") + ``` + """ + await self._scheduler_api.resume_schedule(name=name, **kwargs) + + @deprecated("pause_all_schedules is deprecated; use pause_all_schedules_validated instead") + @typing_deprecated( + "pause_all_schedules is deprecated; use pause_all_schedules_validated instead" + ) async def pause_all_schedules(self) -> Dict[str, object]: - """Pause all workflow schedules""" - return await self.scheduler_api.pause_all_schedules() + """Pause all workflow schedules. + + .. deprecated:: + Use pause_all_schedules_validated instead for type-safe validated responses. + + Returns: + Dictionary with pause operation results - async def resume_all_schedules(self) -> Dict[str, object]: - """Resume all paused workflow schedules""" - return await self.scheduler_api.resume_all_schedules() + Example: + ```python + await scheduler_client.pause_all_schedules() + ``` + """ + return await self._scheduler_api.pause_all_schedules() + + async def pause_all_schedules_validated(self, **kwargs) -> None: + """Pause all workflow schedules. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + # Pause all schedules for maintenance + await scheduler_client.pause_all_schedules_validated() + ``` + """ + await self._scheduler_api.pause_all_schedules(**kwargs) + + @deprecated("resume_all_schedules is deprecated; use resume_all_schedules_validated instead") + @typing_deprecated( + "resume_all_schedules is deprecated; use resume_all_schedules_validated instead" + ) + async def resume_all_schedules(self, **kwargs) -> Dict[str, object]: + """Resume all paused workflow schedules. + + .. deprecated:: + Use resume_all_schedules_validated instead for type-safe validated responses. 
+ + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + Dictionary with resume operation results + + Example: + ```python + await scheduler_client.resume_all_schedules() + ``` + """ + return await self._scheduler_api.resume_all_schedules(**kwargs) + + async def resume_all_schedules_validated(self, **kwargs) -> None: + """Resume all paused workflow schedules. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + # Resume all schedules after maintenance + await scheduler_client.resume_all_schedules_validated() + ``` + """ + await self._scheduler_api.resume_all_schedules(**kwargs) # Schedule Search and Discovery + @deprecated("search_schedules is deprecated; use search_schedule_executions instead") + @typing_deprecated("search_schedules is deprecated; use search_schedule_executions instead") async def search_schedules( self, start: int = 0, @@ -71,51 +357,333 @@ async def search_schedules( sort: Optional[str] = None, free_text: Optional[str] = None, query: Optional[str] = None, + **kwargs, ) -> SearchResultWorkflowScheduleExecutionModelAdapter: - """Search for workflow schedules with advanced filtering""" - return await self.scheduler_api.search_v2( - start=start, size=size, sort=sort, free_text=free_text, query=query + """Search for workflow schedules with advanced filtering. + + .. deprecated:: + Use search_schedule_executions instead for consistent API interface. + + Args: + start: Starting index for pagination (default: 0) + size: Number of results to return (default: 100) + sort: Sort order specification + free_text: Free text search query + query: Structured query string + **kwargs: Additional optional parameters to pass to the API + + Returns: + SearchResultWorkflowScheduleExecutionModelAdapter with matching schedules + + Example: + ```python + results = await scheduler_client.search_schedules( + start=0, + size=50, + query="workflowName:daily_report" + ) + ``` + """ + return await self._scheduler_api.search_v2( + start=start, size=size, sort=sort, free_text=free_text, query=query, **kwargs ) - async def get_schedules_by_tag(self, tag_value: str) -> List[WorkflowScheduleModelAdapter]: - """Get schedules filtered by tag key and value""" - return await self.scheduler_api.get_schedules_by_tag(tag_value) + async def search_schedule_executions( + self, + start: Optional[int] = None, + size: Optional[int] = None, + sort: Optional[str] = None, + free_text: Optional[str] = None, + query: Optional[str] = None, + **kwargs, + ) -> SearchResultWorkflowScheduleExecutionModelAdapter: + """Search for workflow schedule executions with advanced filtering. 
+
+        Args:
+            start: Starting index for pagination
+            size: Number of results to return
+            sort: Sort order specification
+            free_text: Free text search query
+            query: Structured query string (e.g., "workflowName:my_workflow")
+            **kwargs: Additional optional parameters to pass to the API
+
+        Returns:
+            SearchResultWorkflowScheduleExecutionModelAdapter with matching schedule executions
+
+        Example:
+            ```python
+            # Search for schedules by workflow name
+            results = await scheduler_client.search_schedule_executions(
+                start=0,
+                size=50,
+                query="workflowName:daily_report"
+            )
+            print(f"Found {results.total_hits} schedules")
+
+            # Free text search
+            results = await scheduler_client.search_schedule_executions(
+                free_text="daily report"
+            )
+            ```
+        """
+        # Forward only the parameters that were explicitly provided; compare
+        # against None so explicit zero values (e.g., start=0) are not dropped.
+        if start is not None:
+            kwargs.update({"start": start})
+        if size is not None:
+            kwargs.update({"size": size})
+        if sort is not None:
+            kwargs.update({"sort": sort})
+        if free_text is not None:
+            kwargs.update({"free_text": free_text})
+        if query is not None:
+            kwargs.update({"query": query})
+        return await self._scheduler_api.search_v2(**kwargs)
+
+    async def get_schedules_by_tag(
+        self, tag_value: str, **kwargs
+    ) -> List[WorkflowScheduleModelAdapter]:
+        """Get schedules filtered by tag value.
+
+        Args:
+            tag_value: Tag value to filter by
+            **kwargs: Additional optional parameters to pass to the API
+
+        Returns:
+            List of WorkflowScheduleModelAdapter instances with matching tag
+
+        Example:
+            ```python
+            # Get all schedules with specific tag
+            schedules = await scheduler_client.get_schedules_by_tag("production")
+            for schedule in schedules:
+                print(f"Schedule: {schedule.name}")
+            ```
+        """
+        return await self._scheduler_api.get_schedules_by_tag(tag=tag_value, **kwargs)
 
     # Schedule Planning & Analysis
+    @deprecated(
+        "get_next_few_schedules is deprecated; use get_next_few_schedule_execution_times instead"
+    )
+    @typing_deprecated(
+        "get_next_few_schedules is deprecated; use get_next_few_schedule_execution_times instead"
+    )
     async def get_next_few_schedules(
         self,
         cron_expression: str,
         schedule_start_time: Optional[int] = None,
         schedule_end_time: Optional[int] = None,
         limit: Optional[int] = None,
+        **kwargs,
     ) -> List[int]:
-        """Get the next execution times for a cron expression"""
-        return await self.scheduler_api.get_next_few_schedules(
+        """Get the next execution times for a cron expression.
+
+        .. deprecated::
+            Use get_next_few_schedule_execution_times instead.
+ + Args: + cron_expression: Cron expression to evaluate + schedule_start_time: Optional start time (epoch milliseconds) + schedule_end_time: Optional end time (epoch milliseconds) + limit: Maximum number of execution times to return + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of execution times as epoch milliseconds + + Example: + ```python + times = await scheduler_client.get_next_few_schedules("0 9 * * *", limit=5) + ``` + """ + return await self._scheduler_api.get_next_few_schedules( cron_expression=cron_expression, schedule_start_time=schedule_start_time, schedule_end_time=schedule_end_time, limit=limit, + **kwargs, ) # Tag Management for Schedules - async def put_tag_for_schedule(self, name: str, tags: List[TagAdapter]) -> None: - """Add tags to a workflow schedule""" - await self.scheduler_api.put_tag_for_schedule(name, tags) + @deprecated("put_tag_for_schedule is deprecated; use set_tags_for_schedule instead") + @typing_deprecated("put_tag_for_schedule is deprecated; use set_tags_for_schedule instead") + async def put_tag_for_schedule(self, name: str, tags: List[TagAdapter], **kwargs) -> None: + """Add tags to a workflow schedule. + + .. deprecated:: + Use set_tags_for_schedule instead. + + Args: + name: Name of the schedule + tags: List of tags to add + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter + + tags = [TagAdapter(key="environment", value="production")] + await scheduler_client.put_tag_for_schedule("daily_report", tags) + ``` + """ + await self._scheduler_api.put_tag_for_schedule(name=name, tag=tags, **kwargs) + + async def set_tags_for_schedule(self, name: str, tags: List[TagAdapter], **kwargs) -> None: + """Set tags for a workflow schedule. + + Args: + name: Name of the schedule + tags: List of tags to set + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter + + tags = [ + TagAdapter(key="environment", value="production"), + TagAdapter(key="team", value="data") + ] + await scheduler_client.set_tags_for_schedule("daily_report", tags) + ``` + """ + await self._scheduler_api.put_tag_for_schedule(name=name, tag=tags, **kwargs) + + @deprecated("get_tags_for_schedule is deprecated; use get_scheduler_tags instead") + @typing_deprecated("get_tags_for_schedule is deprecated; use get_scheduler_tags instead") + async def get_tags_for_schedule(self, name: str, **kwargs) -> List[TagAdapter]: + """Get tags associated with a workflow schedule. - async def get_tags_for_schedule(self, name: str) -> List[TagAdapter]: - """Get tags associated with a workflow schedule""" - return await self.scheduler_api.get_tags_for_schedule(name) + .. deprecated:: + Use get_scheduler_tags instead. 
- async def delete_tag_for_schedule(self, name: str, tags: List[TagAdapter]) -> None: - """Delete specific tags from a workflow schedule""" - await self.scheduler_api.delete_tag_for_schedule(name, tags) + Args: + name: Name of the schedule + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of TagAdapter instances + + Example: + ```python + tags = await scheduler_client.get_tags_for_schedule("daily_report") + ``` + """ + return await self._scheduler_api.get_tags_for_schedule(name, **kwargs) + + async def get_scheduler_tags(self, name: str, **kwargs) -> List[TagAdapter]: + """Get tags associated with a workflow schedule. + + Args: + name: Name of the schedule + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of TagAdapter instances + + Example: + ```python + tags = await scheduler_client.get_scheduler_tags("daily_report") + for tag in tags: + print(f"{tag.key}: {tag.value}") + ``` + """ + return await self._scheduler_api.get_tags_for_schedule(name=name, **kwargs) + + @deprecated("delete_tag_for_schedule is deprecated; use delete_scheduler_tags instead") + @typing_deprecated("delete_tag_for_schedule is deprecated; use delete_scheduler_tags instead") + async def delete_tag_for_schedule(self, name: str, tags: List[TagAdapter], **kwargs) -> None: + """Delete specific tags from a workflow schedule. + + .. deprecated:: + Use delete_scheduler_tags instead. + + Args: + name: Name of the schedule + tags: List of tags to delete + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter + + tags = [TagAdapter(key="environment", value="production")] + await scheduler_client.delete_tag_for_schedule("daily_report", tags) + ``` + """ + await self._scheduler_api.delete_tag_for_schedule(name, tags, **kwargs) + + async def delete_scheduler_tags(self, tags: List[TagAdapter], name: str, **kwargs) -> None: + """Delete specific tags from a workflow schedule. + + Args: + tags: List of tags to delete + name: Name of the schedule + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter + + tags = [TagAdapter(key="environment", value="production")] + await scheduler_client.delete_scheduler_tags(tags, "daily_report") + ``` + """ + await self._scheduler_api.delete_tag_for_schedule(name=name, tag=tags, **kwargs) # Schedule Execution Management + @deprecated( + "requeue_all_execution_records is deprecated; use requeue_all_execution_records_validated instead" + ) + @typing_deprecated( + "requeue_all_execution_records is deprecated; use requeue_all_execution_records_validated instead" + ) async def requeue_all_execution_records(self) -> Dict[str, object]: - """Requeue all execution records for scheduled workflows""" - return await self.scheduler_api.requeue_all_execution_records() + """Requeue all execution records for scheduled workflows. + + .. deprecated:: + Use requeue_all_execution_records_validated instead for type-safe validated responses. + + Returns: + Dictionary with requeue operation results + + Example: + ```python + await scheduler_client.requeue_all_execution_records() + ``` + """ + return await self._scheduler_api.requeue_all_execution_records() + + async def requeue_all_execution_records_validated(self, **kwargs) -> None: + """Requeue all execution records for scheduled workflows. 
+ + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + await scheduler_client.requeue_all_execution_records_validated() + ``` + """ + await self._scheduler_api.requeue_all_execution_records(**kwargs) # Convenience Methods + @deprecated("create_schedule is deprecated; use create_schedule_validated instead") + @typing_deprecated("create_schedule is deprecated; use create_schedule_validated instead") async def create_schedule( self, name: str, @@ -126,7 +694,34 @@ async def create_schedule( timezone: Optional[str] = None, run_catch_up: bool = False, ) -> object: - """Create a new workflow schedule with simplified parameters""" + """Create a new workflow schedule with simplified parameters. + + .. deprecated:: + Use create_schedule_validated instead for type-safe validated responses. + + Args: + name: Unique name for the schedule + cron_expression: Cron expression defining when to run (e.g., "0 9 * * *" for daily at 9 AM) + workflow_name: Name of the workflow to execute + workflow_version: Optional workflow version. If None, uses latest version + start_workflow_request: Optional dict with workflow input parameters + timezone: Optional timezone (e.g., "America/New_York"). If None, uses UTC + run_catch_up: If True, runs missed executions when schedule is resumed + + Returns: + Raw response object from the API + + Example: + ```python + await scheduler_client.create_schedule( + name="daily_report", + cron_expression="0 9 * * *", + workflow_name="generate_report", + timezone="America/New_York", + start_workflow_request={"input": {"date": "today"}} + ) + ``` + """ # Create the start workflow request if not provided if start_workflow_request is None: @@ -152,6 +747,83 @@ async def create_schedule( return await self.save_schedule(save_request) + async def create_schedule_validated( + self, + name: str, + cron_expression: str, + workflow_name: str, + workflow_version: Optional[int] = None, + start_workflow_request: Optional[Dict] = None, + timezone: Optional[str] = None, + run_catch_up: bool = False, + **kwargs, + ) -> None: + """Create a new workflow schedule with simplified parameters. + + Convenience method that simplifies schedule creation by accepting basic parameters + instead of requiring full adapter objects. + + Args: + name: Unique name for the schedule + cron_expression: Cron expression defining when to run (e.g., "0 9 * * *" for daily at 9 AM) + workflow_name: Name of the workflow to execute + workflow_version: Optional workflow version. If None, uses latest version + start_workflow_request: Optional dict with workflow parameters (input, correlationId, priority, taskToDomain) + timezone: Optional timezone (e.g., "America/New_York"). 
If None, uses UTC + run_catch_up: If True, runs missed executions when schedule is resumed + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + # Create a daily report schedule + await scheduler_client.create_schedule_validated( + name="daily_report", + cron_expression="0 9 * * MON-FRI", # Weekdays at 9 AM + workflow_name="generate_report", + workflow_version=1, + timezone="America/New_York", + start_workflow_request={ + "input": {"report_type": "daily", "format": "pdf"}, + "priority": 5 + } + ) + + # Create a simple hourly schedule + await scheduler_client.create_schedule_validated( + name="hourly_sync", + cron_expression="0 * * * *", # Every hour + workflow_name="data_sync" + ) + ``` + """ + # Create the start workflow request if not provided + if start_workflow_request is None: + start_workflow_request = {} + + start_req = StartWorkflowRequestAdapter( + name=workflow_name, + version=workflow_version, + input=start_workflow_request.get("input", {}), + correlation_id=start_workflow_request.get("correlationId"), + priority=start_workflow_request.get("priority"), + task_to_domain=start_workflow_request.get("taskToDomain", {}), + ) + + save_request = SaveScheduleRequestAdapter( + name=name, + cron_expression=cron_expression, + start_workflow_request=start_req, + paused=False, + run_catch_up=run_catch_up, + timezone=timezone, + ) + await self.save_schedule_validated(save_schedule_request=save_request, **kwargs) + + @deprecated("update_schedule is deprecated; use update_schedule_validated instead") + @typing_deprecated("update_schedule is deprecated; use update_schedule_validated instead") async def update_schedule( self, name: str, @@ -159,8 +831,33 @@ async def update_schedule( paused: Optional[bool] = None, run_catch_up: Optional[bool] = None, timezone: Optional[str] = None, + **kwargs, ) -> object: - """Update an existing schedule with new parameters""" + """Update an existing schedule with new parameters. + + .. deprecated:: + Use update_schedule_validated instead for type-safe validated responses. + + Args: + name: Name of the schedule to update + cron_expression: Optional new cron expression + paused: Optional paused status + run_catch_up: Optional run catch-up setting + timezone: Optional new timezone + **kwargs: Additional optional parameters to pass to the API + + Returns: + Raw response object from the API + + Example: + ```python + # Update schedule to run every 2 hours instead + await scheduler_client.update_schedule( + "daily_report", + cron_expression="0 */2 * * *" + ) + ``` + """ # Get the existing schedule existing_schedule = await self.get_schedule(name) @@ -178,80 +875,393 @@ async def update_schedule( zone_id=timezone or existing_schedule.zone_id, ) - return await self.save_schedule(save_request) + return await self.save_schedule(save_request, **kwargs) + + async def update_schedule_validated( + self, + name: str, + cron_expression: Optional[str] = None, + paused: Optional[bool] = None, + run_catch_up: Optional[bool] = None, + timezone: Optional[str] = None, + **kwargs, + ) -> None: + """Update an existing schedule with new parameters. + + Fetches the existing schedule and updates only the specified fields, + preserving all other settings. 
+ + Args: + name: Name of the schedule to update + cron_expression: Optional new cron expression + paused: Optional paused status + run_catch_up: Optional run catch-up setting + timezone: Optional new timezone + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + # Update schedule to run every 2 hours + await scheduler_client.update_schedule_validated( + "daily_report", + cron_expression="0 */2 * * *" + ) + + # Pause a schedule + await scheduler_client.update_schedule_validated( + "daily_report", + paused=True + ) + + # Change timezone + await scheduler_client.update_schedule_validated( + "daily_report", + timezone="Europe/London" + ) + ``` + """ + existing_schedule = await self.get_schedule(name=name, **kwargs) + + # Create updated save request + save_request = SaveScheduleRequestAdapter( + name=name, + cron_expression=cron_expression or existing_schedule.cron_expression, + start_workflow_request=existing_schedule.start_workflow_request, + paused=paused if paused is not None else existing_schedule.paused, + run_catchup_schedule_instances=( + run_catch_up + if run_catch_up is not None + else existing_schedule.run_catchup_schedule_instances + ), + zone_id=timezone or existing_schedule.zone_id, + ) + + await self.save_schedule_validated(save_schedule_request=save_request, **kwargs) + + async def schedule_exists(self, name: str, **kwargs) -> bool: + """Check if a schedule exists. - async def schedule_exists(self, name: str) -> bool: - """Check if a schedule exists""" + Args: + name: Name of the schedule to check + **kwargs: Additional optional parameters to pass to the API + + Returns: + True if schedule exists, False otherwise + + Example: + ```python + if await scheduler_client.schedule_exists("daily_report"): + print("Schedule exists") + else: + print("Schedule not found") + ``` + """ try: - await self.get_schedule(name) + await self.get_schedule(name=name, **kwargs) return True except Exception: return False async def get_schedules_by_workflow( - self, workflow_name: str + self, workflow_name: str, **kwargs ) -> List[WorkflowScheduleModelAdapter]: - """Get all schedules for a specific workflow""" - return await self.get_all_schedules(workflow_name=workflow_name) + """Get all schedules for a specific workflow. + + Args: + workflow_name: Name of the workflow + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of WorkflowScheduleModelAdapter instances - async def get_active_schedules(self) -> List[WorkflowScheduleModelAdapter]: - """Get all active (non-paused) schedules""" - all_schedules = await self.get_all_schedules() + Example: + ```python + schedules = await scheduler_client.get_schedules_by_workflow("generate_report") + print(f"Found {len(schedules)} schedules for this workflow") + ``` + """ + return await self.get_all_schedules(workflow_name=workflow_name, **kwargs) + + async def get_active_schedules(self, **kwargs) -> List[WorkflowScheduleModelAdapter]: + """Get all active (non-paused) schedules. 
+ + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of active WorkflowScheduleModelAdapter instances + + Example: + ```python + active = await scheduler_client.get_active_schedules() + print(f"{len(active)} schedules are currently running") + ``` + """ + all_schedules = await self.get_all_schedules(**kwargs) return [schedule for schedule in all_schedules if not schedule.paused] - async def get_paused_schedules(self) -> List[WorkflowScheduleModelAdapter]: - """Get all paused schedules""" - all_schedules = await self.get_all_schedules() + async def get_paused_schedules(self, **kwargs) -> List[WorkflowScheduleModelAdapter]: + """Get all paused schedules. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of paused WorkflowScheduleModelAdapter instances + + Example: + ```python + paused = await scheduler_client.get_paused_schedules() + print(f"{len(paused)} schedules are paused") + ``` + """ + all_schedules = await self.get_all_schedules(**kwargs) return [schedule for schedule in all_schedules if schedule.paused] - async def bulk_pause_schedules(self, schedule_names: List[str]) -> None: - """Pause multiple schedules in bulk""" + async def bulk_pause_schedules(self, schedule_names: List[str], **kwargs) -> None: + """Pause multiple schedules in bulk. + + Continues even if some pause operations fail. + + Args: + schedule_names: List of schedule names to pause + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + schedules = ["schedule1", "schedule2", "schedule3"] + await scheduler_client.bulk_pause_schedules(schedules) + ``` + """ for name in schedule_names: try: - await self.pause_schedule(name) + await self.pause_schedule_validated(name=name, **kwargs) except Exception: # noqa: PERF203 continue - async def bulk_resume_schedules(self, schedule_names: List[str]) -> None: - """Resume multiple schedules in bulk""" + async def bulk_resume_schedules(self, schedule_names: List[str], **kwargs) -> None: + """Resume multiple schedules in bulk. + + Continues even if some resume operations fail. + + Args: + schedule_names: List of schedule names to resume + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + schedules = ["schedule1", "schedule2", "schedule3"] + await scheduler_client.bulk_resume_schedules(schedules) + ``` + """ for name in schedule_names: try: - await self.resume_schedule(name) + await self.resume_schedule_validated(name=name, **kwargs) except Exception: # noqa: PERF203 continue - async def bulk_delete_schedules(self, schedule_names: List[str]) -> None: - """Delete multiple schedules in bulk""" + async def bulk_delete_schedules(self, schedule_names: List[str], **kwargs) -> None: + """Delete multiple schedules in bulk. + + Continues even if some delete operations fail. 
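# Usage sketch (hypothetical helper): pause every schedule attached to one workflow by
# combining get_schedules_by_workflow with bulk_pause_schedules. `scheduler_client` is
# assumed to be the scheduler client from this file, and each returned schedule model
# is assumed to expose a `name` attribute.
async def pause_workflow_schedules(scheduler_client, workflow_name: str) -> int:
    schedules = await scheduler_client.get_schedules_by_workflow(workflow_name)
    names = [schedule.name for schedule in schedules if schedule.name]
    # bulk_pause_schedules skips individual failures, so one bad schedule
    # does not abort the whole batch.
    await scheduler_client.bulk_pause_schedules(names)
    return len(names)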
+ + Args: + schedule_names: List of schedule names to delete + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + old_schedules = ["old_schedule1", "old_schedule2"] + await scheduler_client.bulk_delete_schedules(old_schedules) + ``` + """ for name in schedule_names: try: - await self.delete_schedule(name) + await self.delete_schedule_validated(name=name, **kwargs) except Exception: # noqa: PERF203 continue - async def validate_cron_expression(self, cron_expression: str, limit: int = 5) -> List[int]: - """Validate a cron expression by getting its next execution times""" - return await self.get_next_few_schedules(cron_expression, limit=limit) + async def validate_cron_expression( + self, cron_expression: str, limit: int = 5, **kwargs + ) -> List[int]: + """Validate a cron expression by getting its next execution times. + + Args: + cron_expression: Cron expression to validate + limit: Number of execution times to return (default: 5) + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of next execution times as epoch milliseconds + + Example: + ```python + # Validate a cron expression and see when it will run + times = await scheduler_client.validate_cron_expression("0 9 * * MON-FRI") + from datetime import datetime + for timestamp in times: + dt = datetime.fromtimestamp(timestamp / 1000) + print(f"Will run at: {dt}") + ``` + """ + return await self.get_next_few_schedule_execution_times( + cron_expression=cron_expression, limit=limit, **kwargs + ) async def search_schedules_by_workflow( - self, workflow_name: str, start: int = 0, size: int = 100 + self, workflow_name: str, start: int = 0, size: int = 100, **kwargs ) -> SearchResultWorkflowScheduleExecutionModelAdapter: - """Search schedules for a specific workflow""" - return await self.search_schedules( - start=start, size=size, query=f"workflowName:{workflow_name}" + """Search schedules for a specific workflow. + + Args: + workflow_name: Name of the workflow to search for + start: Starting index for pagination (default: 0) + size: Number of results to return (default: 100) + **kwargs: Additional optional parameters to pass to the API + + Returns: + SearchResultWorkflowScheduleExecutionModelAdapter with matching schedules + + Example: + ```python + results = await scheduler_client.search_schedules_by_workflow("generate_report") + print(f"Found {results.total_hits} schedules") + ``` + """ + return await self.search_schedule_executions( + start=start, size=size, query=f"workflowName:{workflow_name}", **kwargs ) async def search_schedules_by_status( - self, paused: bool, start: int = 0, size: int = 100 + self, paused: bool, start: int = 0, size: int = 100, **kwargs ) -> SearchResultWorkflowScheduleExecutionModelAdapter: - """Search schedules by their status (paused/active)""" + """Search schedules by their status (paused/active). + + Args: + paused: If True, search for paused schedules. 
If False, search for active schedules + start: Starting index for pagination (default: 0) + size: Number of results to return (default: 100) + **kwargs: Additional optional parameters to pass to the API + + Returns: + SearchResultWorkflowScheduleExecutionModelAdapter with matching schedules + + Example: + ```python + # Get all paused schedules + paused_results = await scheduler_client.search_schedules_by_status(paused=True) + + # Get all active schedules + active_results = await scheduler_client.search_schedules_by_status(paused=False) + ``` + """ status_query = "paused:true" if paused else "paused:false" - return await self.search_schedules(start=start, size=size, query=status_query) + return await self.search_schedule_executions( + start=start, size=size, query=status_query, **kwargs + ) + + async def get_schedule_count(self, **kwargs) -> int: + """Get the total number of schedules. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + Total count of schedules as integer - async def get_schedule_count(self) -> int: - """Get the total number of schedules""" - schedules = await self.get_all_schedules() + Example: + ```python + count = await scheduler_client.get_schedule_count() + print(f"Total schedules: {count}") + ``` + """ + schedules = await self.get_all_schedules(**kwargs) return len(schedules) - async def get_schedules_with_tag(self, tag_value: str) -> List[WorkflowScheduleModelAdapter]: - """Get schedules that have a specific tag (alias for get_schedules_by_tag)""" - return await self.get_schedules_by_tag(tag_value) + async def get_schedules_with_tag( + self, tag_value: str, **kwargs + ) -> List[WorkflowScheduleModelAdapter]: + """Get schedules that have a specific tag (alias for get_schedules_by_tag). + + Args: + tag_value: Tag value to filter by + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of WorkflowScheduleModelAdapter instances with matching tag + + Example: + ```python + schedules = await scheduler_client.get_schedules_with_tag("production") + ``` + """ + return await self.get_schedules_by_tag(tag_value=tag_value, **kwargs) + + async def get_next_few_schedule_execution_times( + self, + cron_expression: str, + schedule_start_time: Optional[int] = None, + schedule_end_time: Optional[int] = None, + limit: Optional[int] = None, + ) -> List[int]: + """Get the next few execution times for a cron expression. + + Useful for validating cron expressions and previewing when schedules will run. 
+ + Args: + cron_expression: Cron expression to evaluate + schedule_start_time: Optional start time in epoch milliseconds + schedule_end_time: Optional end time in epoch milliseconds + limit: Maximum number of execution times to return + + Returns: + List of execution times as epoch milliseconds + + Example: + ```python + from datetime import datetime + + # Get next 10 execution times for daily 9 AM schedule + times = await scheduler_client.get_next_few_schedule_execution_times( + cron_expression="0 9 * * *", + limit=10 + ) + + # Display the times + for timestamp in times: + dt = datetime.fromtimestamp(timestamp / 1000) + print(f"Will execute at: {dt}") + + # Get executions within a time window + import time + start = int(time.time() * 1000) # Now + end = start + (7 * 24 * 60 * 60 * 1000) # 7 days from now + times = await scheduler_client.get_next_few_schedule_execution_times( + cron_expression="0 */6 * * *", # Every 6 hours + schedule_start_time=start, + schedule_end_time=end + ) + ``` + """ + kwargs = {} + if schedule_start_time: + kwargs.update({"schedule_start_time": schedule_start_time}) + if schedule_end_time: + kwargs.update({"schedule_end_time": schedule_end_time}) + if limit: + kwargs.update({"limit": limit}) + return await self._scheduler_api.get_next_few_schedules( + cron_expression=cron_expression, **kwargs + ) diff --git a/src/conductor/asyncio_client/orkes/orkes_schema_client.py b/src/conductor/asyncio_client/orkes/orkes_schema_client.py index 06d4792e3..65ed02d76 100644 --- a/src/conductor/asyncio_client/orkes/orkes_schema_client.py +++ b/src/conductor/asyncio_client/orkes/orkes_schema_client.py @@ -2,6 +2,9 @@ from typing import List, Optional +from deprecated import deprecated +from typing_extensions import deprecated as typing_deprecated + from conductor.asyncio_client.adapters import ApiClient from conductor.asyncio_client.adapters.models.schema_def_adapter import SchemaDefAdapter from conductor.asyncio_client.configuration.configuration import Configuration @@ -10,53 +13,276 @@ class OrkesSchemaClient(OrkesBaseClient): def __init__(self, configuration: Configuration, api_client: ApiClient): + """Initialize the OrkesSchemaClient with configuration and API client. + + Args: + configuration: Configuration object containing server settings and authentication + api_client: ApiClient instance for making API requests + + Example: + ```python + from conductor.asyncio_client.configuration.configuration import Configuration + from conductor.asyncio_client.adapters import ApiClient + + config = Configuration(server_api_url="http://localhost:8080/api") + api_client = ApiClient(configuration=config) + schema_client = OrkesSchemaClient(config, api_client) + ``` + """ super().__init__(configuration, api_client) # Core Schema Operations + @deprecated("save_schemas is deprecated; use register_schema instead") + @typing_deprecated("save_schemas is deprecated; use register_schema instead") async def save_schemas( self, schema_defs: List[SchemaDefAdapter], new_version: Optional[bool] = None ) -> None: - """Save one or more schema definitions""" - await self.schema_api.save(schema_defs, new_version=new_version) + """Save one or more schema definitions. + + .. deprecated:: + Use register_schemas instead for consistent API interface. 
+ + Args: + schema_defs: List of schema definitions to save + new_version: If True, create new versions of existing schemas + + Returns: + None + Example: + ```python + from conductor.asyncio_client.adapters.models.schema_def_adapter import SchemaDefAdapter + + schemas = [ + SchemaDefAdapter(name="user_schema", version=1, data={"type": "object"}), + SchemaDefAdapter(name="order_schema", version=1, data={"type": "object"}) + ] + await schema_client.save_schemas(schemas) + ``` + """ + await self._schema_api.save(schema_defs, new_version=new_version) + + async def register_schemas( + self, schema_defs: List[SchemaDefAdapter], new_version: Optional[bool] = None, **kwargs + ) -> None: + """Register one or more schema definitions. + + Schema definitions define data structures and validation rules for workflow inputs/outputs. + + Args: + schema_defs: List of schema definitions to register + new_version: If True, create new versions of existing schemas + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.schema_def_adapter import SchemaDefAdapter + + # Register JSON schemas for data validation + schemas = [ + SchemaDefAdapter( + name="user_schema", + version=1, + type="JSON", + data={ + "type": "object", + "properties": { + "name": {"type": "string"}, + "email": {"type": "string", "format": "email"} + }, + "required": ["name", "email"] + } + ) + ] + await schema_client.register_schemas(schemas) + ``` + """ + await self._schema_api.save(schema_def=schema_defs, new_version=new_version, **kwargs) + + @deprecated("save_schema is deprecated; use register_schema instead") + @typing_deprecated("save_schema is deprecated; use register_schema instead") async def save_schema( self, schema_def: SchemaDefAdapter, new_version: Optional[bool] = None ) -> None: - """Save a single schema definition""" - await self.save_schemas([schema_def], new_version=new_version) + """Save a single schema definition. + + .. deprecated:: + Use register_schema instead for consistent API interface. - async def get_schema(self, name: str, version: int) -> SchemaDefAdapter: - """Get a specific schema by name and version""" - return await self.schema_api.get_schema_by_name_and_version(name, version) + Args: + schema_def: Schema definition to save + new_version: If True, create a new version of existing schema - async def get_all_schemas(self) -> List[SchemaDefAdapter]: - """Get all schema definitions""" - return await self.schema_api.get_all_schemas() + Returns: + None - async def delete_schema_by_name(self, name: str) -> None: - """Delete all versions of a schema by name""" - await self.schema_api.delete_schema_by_name(name) + Example: + ```python + from conductor.asyncio_client.adapters.models.schema_def_adapter import SchemaDefAdapter + + schema = SchemaDefAdapter(name="user_schema", version=1, data={"type": "object"}) + await schema_client.save_schema(schema) + ``` + """ + await self.save_schemas([schema_def], new_version=new_version) + + async def register_schema( + self, schema_def: SchemaDefAdapter, new_version: Optional[bool] = None, **kwargs + ) -> None: + """Register a single schema definition. 
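# Migration sketch for the deprecation introduced above: save_schemas keeps working but
# now warns at runtime via the `deprecated` decorators, and register_schemas is the
# drop-in replacement. `schema_client` is assumed to be an OrkesSchemaClient and
# `schemas` a list of SchemaDefAdapter instances; the helper name is hypothetical.
async def migrate_schema_registration(schema_client, schemas) -> None:
    # Before: await schema_client.save_schemas(schemas)  # deprecated
    await schema_client.register_schemas(schemas)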
+ + Args: + schema_def: Schema definition to register + new_version: If True, create a new version of existing schema + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.schema_def_adapter import SchemaDefAdapter + + schema = SchemaDefAdapter( + name="order_schema", + version=1, + type="JSON", + data={ + "type": "object", + "properties": { + "order_id": {"type": "string"}, + "items": {"type": "array"}, + "total": {"type": "number"} + } + } + ) + await schema_client.register_schema(schema) + ``` + """ + await self.register_schemas(schema_defs=[schema_def], new_version=new_version, **kwargs) + + async def get_schema(self, name: str, version: int, **kwargs) -> SchemaDefAdapter: + """Get a specific schema by name and version. + + Args: + name: Name of the schema + version: Version number + **kwargs: Additional optional parameters to pass to the API + + Returns: + SchemaDefAdapter instance containing the schema definition + + Example: + ```python + schema = await schema_client.get_schema("user_schema", version=1) + print(f"Schema type: {schema.type}") + print(f"Schema data: {schema.data}") + ``` + """ + return await self._schema_api.get_schema_by_name_and_version( + name=name, version=version, **kwargs + ) - async def delete_schema_by_name_and_version(self, name: str, version: int) -> None: - """Delete a specific version of a schema""" - await self.schema_api.delete_schema_by_name_and_version(name, version) + async def get_all_schemas(self, **kwargs) -> List[SchemaDefAdapter]: + """Get all schema definitions. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of all SchemaDefAdapter instances + + Example: + ```python + schemas = await schema_client.get_all_schemas() + for schema in schemas: + print(f"Schema: {schema.name} v{schema.version}") + ``` + """ + return await self._schema_api.get_all_schemas(**kwargs) + + async def delete_schema_by_name(self, name: str, **kwargs) -> None: + """Delete all versions of a schema by name. + + Args: + name: Name of the schema to delete + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + # Delete all versions of a schema + await schema_client.delete_schema_by_name("old_user_schema") + ``` + """ + await self._schema_api.delete_schema_by_name(name=name, **kwargs) + + async def delete_schema_by_name_and_version(self, name: str, version: int, **kwargs) -> None: + """Delete a specific version of a schema. + + Args: + name: Name of the schema + version: Version number to delete + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + # Delete only version 1 of the schema + await schema_client.delete_schema_by_name_and_version("user_schema", 1) + ``` + """ + await self._schema_api.delete_schema_by_name_and_version( + name=name, version=version, **kwargs + ) # Convenience Methods async def create_schema( - self, - name: str, - version: int, - schema_definition: dict, - schema_type: str = "JSON", + self, name: str, version: int, schema_definition: dict, schema_type: str = "JSON", **kwargs ) -> None: - """Create a new schema with simplified parameters""" + """Create a new schema with simplified parameters. + + Convenience method for creating schemas from dictionary definitions. 
+ + Args: + name: Name of the schema + version: Version number + schema_definition: Schema data as dictionary (e.g., JSON Schema) + schema_type: Type of schema (default: "JSON") + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + # Create a JSON schema for user data + schema_def = { + "type": "object", + "properties": { + "user_id": {"type": "string"}, + "name": {"type": "string"}, + "age": {"type": "integer", "minimum": 0} + }, + "required": ["user_id", "name"] + } + await schema_client.create_schema("user_schema", 1, schema_def) + ``` + """ schema_def = SchemaDefAdapter( name=name, version=version, data=schema_definition, type=schema_type, ) - await self.save_schema(schema_def) + await self.register_schema(schema_def=schema_def, **kwargs) async def update_schema( self, @@ -65,27 +291,91 @@ async def update_schema( schema_definition: dict, schema_type: str = "JSON", create_new_version: bool = False, + **kwargs, ) -> None: - """Update an existing schema""" + """Update an existing schema. + + Args: + name: Name of the schema + version: Version number + schema_definition: Updated schema data as dictionary + schema_type: Type of schema (default: "JSON") + create_new_version: If True, create a new version instead of overwriting + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + # Update existing schema + updated_schema = { + "type": "object", + "properties": { + "user_id": {"type": "string"}, + "name": {"type": "string"}, + "email": {"type": "string", "format": "email"} + } + } + await schema_client.update_schema("user_schema", 1, updated_schema) + + # Create new version + await schema_client.update_schema( + "user_schema", 2, updated_schema, create_new_version=True + ) + ``` + """ schema_def = SchemaDefAdapter( name=name, version=version, data=schema_definition, type=schema_type, ) - await self.save_schema(schema_def, new_version=create_new_version) - - async def schema_exists(self, name: str, version: int) -> bool: - """Check if a specific schema version exists""" + await self.register_schema(schema_def=schema_def, new_version=create_new_version, **kwargs) + + async def schema_exists(self, name: str, version: int, **kwargs) -> bool: + """Check if a specific schema version exists. + + Args: + name: Name of the schema + version: Version number to check + **kwargs: Additional optional parameters to pass to the API + + Returns: + True if schema exists, False otherwise + + Example: + ```python + if await schema_client.schema_exists("user_schema", 1): + print("Schema version 1 exists") + else: + print("Schema version 1 not found") + ``` + """ try: - await self.get_schema(name, version) + await self.get_schema(name=name, version=version, **kwargs) return True except Exception: return False - async def get_latest_schema_version(self, name: str) -> Optional[SchemaDefAdapter]: - """Get the latest version of a schema by name""" - all_schemas = await self.get_all_schemas() + async def get_latest_schema_version(self, name: str, **kwargs) -> Optional[SchemaDefAdapter]: + """Get the latest version of a schema by name. 
+ + Args: + name: Name of the schema + **kwargs: Additional optional parameters to pass to the API + + Returns: + SchemaDefAdapter instance for the latest version, or None if not found + + Example: + ```python + latest = await schema_client.get_latest_schema_version("user_schema") + if latest: + print(f"Latest version: {latest.version}") + ``` + """ + all_schemas = await self.get_all_schemas(**kwargs) matching_schemas = [schema for schema in all_schemas if schema.name == name] if not matching_schemas: @@ -94,9 +384,23 @@ async def get_latest_schema_version(self, name: str) -> Optional[SchemaDefAdapte # Find the schema with the highest version number return max(matching_schemas, key=lambda schema: schema.version or 0) - async def get_schema_versions(self, name: str) -> List[int]: - """Get all version numbers for a schema""" - all_schemas = await self.get_all_schemas() + async def get_schema_versions(self, name: str, **kwargs) -> List[int]: + """Get all version numbers for a schema. + + Args: + name: Name of the schema + **kwargs: Additional optional parameters to pass to the API + + Returns: + Sorted list of version numbers + + Example: + ```python + versions = await schema_client.get_schema_versions("user_schema") + print(f"Available versions: {versions}") # [1, 2, 3] + ``` + """ + all_schemas = await self.get_all_schemas(**kwargs) versions = [ schema.version for schema in all_schemas @@ -104,26 +408,97 @@ async def get_schema_versions(self, name: str) -> List[int]: ] return sorted(versions) - async def get_schemas_by_name(self, name: str) -> List[SchemaDefAdapter]: - """Get all versions of a schema by name""" - all_schemas = await self.get_all_schemas() + async def get_schemas_by_name(self, name: str, **kwargs) -> List[SchemaDefAdapter]: + """Get all versions of a schema by name. + + Args: + name: Name of the schema + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of SchemaDefAdapter instances for all versions + + Example: + ```python + versions = await schema_client.get_schemas_by_name("user_schema") + for schema in versions: + print(f"Version {schema.version}: {schema.data}") + ``` + """ + all_schemas = await self.get_all_schemas(**kwargs) return [schema for schema in all_schemas if schema.name == name] - async def get_schema_count(self) -> int: - """Get the total number of schema definitions""" - schemas = await self.get_all_schemas() + async def get_schema_count(self, **kwargs) -> int: + """Get the total number of schema definitions. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + Total count of schemas as integer + + Example: + ```python + count = await schema_client.get_schema_count() + print(f"Total schemas: {count}") + ``` + """ + schemas = await self.get_all_schemas(**kwargs) return len(schemas) - async def get_unique_schema_names(self) -> List[str]: - """Get a list of unique schema names""" - all_schemas = await self.get_all_schemas() + async def get_unique_schema_names(self, **kwargs) -> List[str]: + """Get a list of unique schema names. 
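# Usage sketch (hypothetical helper): summarize one schema with the read-only helpers
# above. Assumes `schema_client` is an OrkesSchemaClient; `latest.version` and
# `latest.type` follow the SchemaDefAdapter fields used elsewhere in this patch.
async def describe_schema(schema_client, name: str) -> str:
    versions = await schema_client.get_schema_versions(name)
    latest = await schema_client.get_latest_schema_version(name)
    if latest is None:
        return f"{name}: not registered"
    return f"{name}: versions={versions}, latest=v{latest.version} ({latest.type})"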
+ + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + Sorted list of unique schema names + + Example: + ```python + names = await schema_client.get_unique_schema_names() + print(f"Schema names: {names}") + ``` + """ + all_schemas = await self.get_all_schemas(**kwargs) names = {schema.name for schema in all_schemas if schema.name} return sorted(names) async def bulk_save_schemas( - self, schemas: List[dict], new_version: Optional[bool] = None + self, schemas: List[dict], new_version: Optional[bool] = None, **kwargs ) -> None: - """Save multiple schemas from dictionary definitions""" + """Save multiple schemas from dictionary definitions. + + Convenience method for bulk schema creation from dictionaries. + + Args: + schemas: List of schema dictionaries with keys: name, version, data, type + new_version: If True, create new versions of existing schemas + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + schemas = [ + { + "name": "user_schema", + "version": 1, + "data": {"type": "object", "properties": {"name": {"type": "string"}}}, + "type": "JSON" + }, + { + "name": "order_schema", + "version": 1, + "data": {"type": "object", "properties": {"order_id": {"type": "string"}}}, + "type": "JSON" + } + ] + await schema_client.bulk_save_schemas(schemas) + ``` + """ schema_defs = [] for schema_dict in schemas: schema_def = SchemaDefAdapter( @@ -134,17 +509,33 @@ async def bulk_save_schemas( ) schema_defs.append(schema_def) - await self.save_schemas(schema_defs, new_version=new_version) + await self.register_schemas(schema_defs=schema_defs, new_version=new_version, **kwargs) async def clone_schema( - self, - source_name: str, - source_version: int, - target_name: str, - target_version: int, + self, source_name: str, source_version: int, target_name: str, target_version: int, **kwargs ) -> None: - """Clone an existing schema to a new name/version""" - source_schema = await self.get_schema(source_name, source_version) + """Clone an existing schema to a new name/version. + + Args: + source_name: Name of the source schema + source_version: Version of the source schema + target_name: Name for the cloned schema + target_version: Version for the cloned schema + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + # Clone user_schema v1 as customer_schema v1 + await schema_client.clone_schema( + "user_schema", 1, + "customer_schema", 1 + ) + ``` + """ + source_schema = await self.get_schema(name=source_name, version=source_version, **kwargs) cloned_schema = SchemaDefAdapter( name=target_name, @@ -153,38 +544,112 @@ async def clone_schema( type=source_schema.type, ) - await self.save_schema(cloned_schema) + await self.register_schema(schema_def=cloned_schema, **kwargs) + + async def delete_all_schema_versions(self, name: str, **kwargs) -> None: + """Delete all versions of a schema (alias for delete_schema_by_name). 
+ + Args: + name: Name of the schema + **kwargs: Additional optional parameters to pass to the API - async def delete_all_schema_versions(self, name: str) -> None: - """Delete all versions of a schema (alias for delete_schema_by_name)""" - await self.delete_schema_by_name(name) + Returns: + None - async def search_schemas_by_name(self, name_pattern: str) -> List[SchemaDefAdapter]: - """Search schemas by name pattern (case-insensitive)""" - all_schemas = await self.get_all_schemas() + Example: + ```python + await schema_client.delete_all_schema_versions("old_schema") + ``` + """ + await self.delete_schema_by_name(name=name, **kwargs) + + async def search_schemas_by_name(self, name_pattern: str, **kwargs) -> List[SchemaDefAdapter]: + """Search schemas by name pattern (case-insensitive). + + Args: + name_pattern: Pattern to search for in schema names + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of SchemaDefAdapter instances matching the pattern + + Example: + ```python + # Find all schemas with "user" in the name + schemas = await schema_client.search_schemas_by_name("user") + for schema in schemas: + print(f"Found: {schema.name} v{schema.version}") + ``` + """ + all_schemas = await self.get_all_schemas(**kwargs) return [ schema for schema in all_schemas if name_pattern.lower() in (schema.name or "").lower() ] async def get_schemas_with_external_ref( - self, external_ref_pattern: str + self, external_ref_pattern: str, **kwargs ) -> List[SchemaDefAdapter]: - """Find schemas that contain a specific text in their external ref""" - all_schemas = await self.get_all_schemas() + """Find schemas that contain a specific text in their external ref. + + Args: + external_ref_pattern: Pattern to search for in external references + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of SchemaDefAdapter instances with matching external ref + + Example: + ```python + # Find schemas referencing a specific URL + schemas = await schema_client.get_schemas_with_external_ref("https://example.com") + ``` + """ + all_schemas = await self.get_all_schemas(**kwargs) return [ schema for schema in all_schemas if schema.external_ref and external_ref_pattern.lower() in schema.external_ref.lower() ] - async def validate_schema_structure(self, schema_definition: dict) -> bool: - """Basic validation to check if schema definition has required structure""" + async def validate_schema_structure(self, schema_definition: dict, **kwargs) -> bool: + """Basic validation to check if schema definition has required structure. + + Args: + schema_definition: Schema data to validate + **kwargs: Additional optional parameters + + Returns: + True if schema is valid (non-empty dict), False otherwise + + Example: + ```python + schema_def = {"type": "object", "properties": {}} + is_valid = await schema_client.validate_schema_structure(schema_def) + if is_valid: + print("Schema structure is valid") + ``` + """ # This is a basic validation - you might want to add more sophisticated JSON schema validation return isinstance(schema_definition, dict) and len(schema_definition) > 0 - async def get_schema_statistics(self) -> dict: - """Get comprehensive statistics about schemas""" - all_schemas = await self.get_all_schemas() + async def get_schema_statistics(self, **kwargs) -> dict: + """Get comprehensive statistics about schemas. 
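# Usage sketch (hypothetical, destructive helper): delete every schema whose name
# matches a pattern by combining search_schemas_by_name with delete_all_schema_versions.
# Assumes `schema_client` is an OrkesSchemaClient; intended as an outline only.
async def delete_schemas_matching(schema_client, pattern: str) -> list:
    matches = await schema_client.search_schemas_by_name(pattern)
    names = sorted({schema.name for schema in matches if schema.name})
    for name in names:
        await schema_client.delete_all_schema_versions(name)
    return names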
+ + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + Dictionary containing schema statistics + + Example: + ```python + stats = await schema_client.get_schema_statistics() + print(f"Total schemas: {stats['total_schemas']}") + print(f"Unique names: {stats['unique_schema_names']}") + print(f"Version counts: {stats['version_counts']}") + ``` + """ + all_schemas = await self.get_all_schemas(**kwargs) unique_names = set() version_counts: dict[str, int] = {} @@ -203,23 +668,86 @@ async def get_schema_statistics(self) -> dict: } # Legacy compatibility methods (aliasing new method names to match the original draft) - async def list_schemas(self) -> List[SchemaDefAdapter]: - """Legacy method: Get all schema definitions""" - return await self.get_all_schemas() - - async def delete_schema(self, name: str, version: Optional[int] = None) -> None: - """Legacy method: Delete a schema (by name only or by name and version)""" + async def list_schemas(self, **kwargs) -> List[SchemaDefAdapter]: + """Get all schema definitions (legacy alias). + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of all SchemaDefAdapter instances + + Example: + ```python + schemas = await schema_client.list_schemas() + ``` + """ + return await self.get_all_schemas(**kwargs) + + async def delete_schema(self, name: str, version: Optional[int] = None, **kwargs) -> None: + """Delete a schema (by name only or by name and version). + + Args: + name: Name of the schema to delete + version: Optional version number. If None, deletes all versions + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + # Delete all versions + await schema_client.delete_schema("old_schema") + + # Delete specific version + await schema_client.delete_schema("user_schema", version=1) + ``` + """ if version is not None: - await self.delete_schema_by_name_and_version(name, version) + await self.delete_schema_by_name_and_version(name=name, version=version, **kwargs) else: - await self.delete_schema_by_name(name) + await self.delete_schema_by_name(name=name, **kwargs) async def create_schema_version( - self, name: str, schema_definition: dict, schema_type: str = "JSON" + self, name: str, schema_definition: dict, schema_type: str = "JSON", **kwargs ) -> None: - """Create a new version of an existing schema""" + """Create a new version of an existing schema. + + Automatically increments the version number based on existing versions. 
+ + Args: + name: Name of the schema + schema_definition: Schema data as dictionary + schema_type: Type of schema (default: "JSON") + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + # Automatically creates the next version + new_schema_data = { + "type": "object", + "properties": { + "user_id": {"type": "string"}, + "name": {"type": "string"}, + "email": {"type": "string"} + } + } + await schema_client.create_schema_version("user_schema", new_schema_data) + # If user_schema has versions 1,2,3, this creates version 4 + ``` + """ # Get the highest version number for this schema - versions = await self.get_schema_versions(name) + versions = await self.get_schema_versions(name=name, **kwargs) new_version = max(versions) + 1 if versions else 1 - await self.create_schema(name, new_version, schema_definition, schema_type) + await self.create_schema( + name=name, + version=new_version, + schema_definition=schema_definition, + schema_type=schema_type, + **kwargs, + ) diff --git a/src/conductor/asyncio_client/orkes/orkes_secret_client.py b/src/conductor/asyncio_client/orkes/orkes_secret_client.py index d465824a0..12866e0ba 100644 --- a/src/conductor/asyncio_client/orkes/orkes_secret_client.py +++ b/src/conductor/asyncio_client/orkes/orkes_secret_client.py @@ -2,6 +2,9 @@ from typing import Dict, List +from deprecated import deprecated +from typing_extensions import deprecated as typing_deprecated + from conductor.asyncio_client.adapters import ApiClient from conductor.asyncio_client.adapters.models.extended_secret_adapter import ExtendedSecretAdapter from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter @@ -11,71 +14,451 @@ class OrkesSecretClient(OrkesBaseClient): def __init__(self, configuration: Configuration, api_client: ApiClient): + """Initialize the OrkesSecretClient with configuration and API client. + + Args: + configuration: Configuration object containing server settings and authentication + api_client: ApiClient instance for making API requests + + Example: + ```python + from conductor.asyncio_client.configuration.configuration import Configuration + from conductor.asyncio_client.adapters import ApiClient + + config = Configuration(server_api_url="http://localhost:8080/api") + api_client = ApiClient(configuration=config) + secret_client = OrkesSecretClient(config, api_client) + ``` + """ super().__init__(configuration, api_client) # Core Secret Operations + @deprecated("put_secret is deprecated; use put_secret_validated instead") + @typing_deprecated("put_secret is deprecated; use put_secret_validated instead") async def put_secret(self, key: str, secret: str) -> object: - """Store a secret value by key""" - return await self.secret_api.put_secret(key, secret) + """Store a secret value by key. + + .. deprecated:: + Use put_secret_validated instead for type-safe validated responses. + + Args: + key: Unique key for the secret + secret: Secret value to store + + Returns: + Raw response object from the API + + Example: + ```python + await secret_client.put_secret("db_password", "mysecretpassword123") + ``` + """ + return await self._secret_api.put_secret(key, secret) + + async def put_secret_validated(self, key: str, secret: str, **kwargs) -> None: + """Store a secret value by key. 
+ + Args: + key: Unique key for the secret + secret: Secret value to store + **kwargs: Additional optional parameters to pass to the API + + Returns: + None - async def get_secret(self, key: str) -> str: - """Get a secret value by key""" - return await self.secret_api.get_secret(key) + Example: + ```python + # Store database credentials + await secret_client.put_secret_validated("db_password", "mysecretpassword123") + # Store API keys + await secret_client.put_secret_validated("openai_api_key", "sk-...") + ``` + """ + await self._secret_api.put_secret(key=key, body=secret, **kwargs) + + async def get_secret(self, key: str, **kwargs) -> str: + """Get a secret value by key. + + Args: + key: Key of the secret to retrieve + **kwargs: Additional optional parameters to pass to the API + + Returns: + Secret value as string + + Example: + ```python + password = await secret_client.get_secret("db_password") + # Use password in workflow tasks + ``` + """ + return await self._secret_api.get_secret(key=key, **kwargs) + + @deprecated("delete_secret is deprecated; use delete_secret_validated instead") + @typing_deprecated("delete_secret is deprecated; use delete_secret_validated instead") async def delete_secret(self, key: str) -> object: - """Delete a secret by key""" - return await self.secret_api.delete_secret(key) + """Delete a secret by key. + + .. deprecated:: + Use delete_secret_validated instead for type-safe validated responses. + + Args: + key: Key of the secret to delete + + Returns: + Raw response object from the API + + Example: + ```python + await secret_client.delete_secret("old_api_key") + ``` + """ + return await self._secret_api.delete_secret(key) + + async def delete_secret_validated(self, key: str, **kwargs) -> None: + """Delete a secret by key. + + Args: + key: Key of the secret to delete + **kwargs: Additional optional parameters to pass to the API - async def secret_exists(self, key: str) -> bool: - """Check if a secret exists by key""" - return await self.secret_api.secret_exists(key) + Returns: + None + + Example: + ```python + await secret_client.delete_secret_validated("old_api_key") + ``` + """ + await self._secret_api.delete_secret(key=key, **kwargs) + + @deprecated("secret_exists is deprecated; use secret_exists_validated instead") + @typing_deprecated("secret_exists is deprecated; use secret_exists_validated instead") + async def secret_exists(self, key: str) -> object: + """Check if a secret exists by key. + + .. deprecated:: + Use secret_exists_validated instead for type-safe validated responses. + + Args: + key: Key of the secret to check + + Returns: + Raw response object from the API + + Example: + ```python + await secret_client.secret_exists("db_password") + ``` + """ + return await self._secret_api.secret_exists(key) + + async def secret_exists_validated(self, key: str, **kwargs) -> bool: + """Check if a secret exists by key. 
+ + Args: + key: Key of the secret to check + **kwargs: Additional optional parameters to pass to the API + + Returns: + True if secret exists, False otherwise + + Example: + ```python + if await secret_client.secret_exists_validated("db_password"): + print("Secret exists") + else: + print("Secret not found") + ``` + """ + result = await self._secret_api.secret_exists(key=key, **kwargs) + return bool(result) # Secret Listing Operations - async def list_all_secret_names(self) -> List[str]: - """List all secret names (keys)""" - return await self.secret_api.list_all_secret_names() + async def list_all_secret_names(self, **kwargs) -> List[str]: + """List all secret names (keys). + + Args: + **kwargs: Additional optional parameters to pass to the API - async def list_secrets_that_user_can_grant_access_to(self) -> List[str]: - """List secrets that the current user can grant access to""" - return await self.secret_api.list_secrets_that_user_can_grant_access_to() + Returns: + List of secret key names + + Example: + ```python + secrets = await secret_client.list_all_secret_names() + for secret_key in secrets: + print(f"Secret: {secret_key}") + ``` + """ + return await self._secret_api.list_all_secret_names(**kwargs) + + async def list_secrets_that_user_can_grant_access_to(self, **kwargs) -> List[str]: + """List secrets that the current user can grant access to. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of secret keys the user can grant access to + + Example: + ```python + grantable = await secret_client.list_secrets_that_user_can_grant_access_to() + print(f"You can grant access to {len(grantable)} secrets") + ``` + """ + return await self._secret_api.list_secrets_that_user_can_grant_access_to(**kwargs) async def list_secrets_with_tags_that_user_can_grant_access_to( - self, + self, **kwargs ) -> List[ExtendedSecretAdapter]: - """List secrets with tags that the current user can grant access to""" - return await self.secret_api.list_secrets_with_tags_that_user_can_grant_access_to() + """List secrets with tags that the current user can grant access to. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of ExtendedSecretAdapter instances with tag information + + Example: + ```python + secrets = await secret_client.list_secrets_with_tags_that_user_can_grant_access_to() + for secret in secrets: + print(f"Secret: {secret.key}, Tags: {secret.tags}") + ``` + """ + return await self._secret_api.list_secrets_with_tags_that_user_can_grant_access_to(**kwargs) # Tag Management Operations - async def put_tag_for_secret(self, key: str, tags: List[TagAdapter]) -> None: - """Add tags to a secret""" - await self.secret_api.put_tag_for_secret(key, tags) + async def put_tag_for_secret(self, key: str, tags: List[TagAdapter], **kwargs) -> None: + """Add tags to a secret. 
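# Usage sketch (hypothetical helper): write a secret only when it is missing or has
# changed, using the validated variants above. Assumes `secret_client` is an
# OrkesSecretClient instance.
async def ensure_secret(secret_client, key: str, value: str) -> bool:
    if await secret_client.secret_exists_validated(key):
        if await secret_client.get_secret(key) == value:
            return False  # already up to date
    await secret_client.put_secret_validated(key, value)
    return True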
+ + Args: + key: Key of the secret + tags: List of tags to add + **kwargs: Additional optional parameters to pass to the API + + Returns: + None - async def get_tags(self, key: str) -> List[TagAdapter]: - """Get tags for a secret""" - return await self.secret_api.get_tags(key) + Example: + ```python + from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter - async def delete_tag_for_secret(self, key: str, tags: List[TagAdapter]) -> None: - """Remove tags from a secret""" - await self.secret_api.delete_tag_for_secret(key, tags) + tags = [ + TagAdapter(key="environment", value="production"), + TagAdapter(key="team", value="platform") + ] + await secret_client.put_tag_for_secret("db_password", tags) + ``` + """ + await self._secret_api.put_tag_for_secret(key=key, tag=tags, **kwargs) + + async def get_tags(self, key: str, **kwargs) -> List[TagAdapter]: + """Get tags for a secret. + + Args: + key: Key of the secret + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of TagAdapter instances + + Example: + ```python + tags = await secret_client.get_tags("db_password") + for tag in tags: + print(f"{tag.key}: {tag.value}") + ``` + """ + return await self._secret_api.get_tags(key=key, **kwargs) + + async def delete_tag_for_secret(self, key: str, tags: List[TagAdapter], **kwargs) -> None: + """Remove tags from a secret. + + Args: + key: Key of the secret + tags: List of tags to remove + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter + + tags = [TagAdapter(key="environment", value="production")] + await secret_client.delete_tag_for_secret("db_password", tags) + ``` + """ + await self._secret_api.delete_tag_for_secret(key=key, tag=tags, **kwargs) # Cache Operations + @deprecated("clear_local_cache is deprecated; use clear_local_cache_validated instead") + @typing_deprecated("clear_local_cache is deprecated; use clear_local_cache_validated instead") async def clear_local_cache(self) -> Dict[str, str]: - """Clear local cache""" - return await self.secret_api.clear_local_cache() + """Clear local secret cache. + + .. deprecated:: + Use clear_local_cache_validated instead for type-safe validated responses. + + Returns: + Dictionary with cache clear results + Example: + ```python + await secret_client.clear_local_cache() + ``` + """ + return await self._secret_api.clear_local_cache() + + async def clear_local_cache_validated(self, **kwargs) -> None: + """Clear local secret cache. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + # Clear cache after updating secrets + await secret_client.clear_local_cache_validated() + ``` + """ + await self._secret_api.clear_local_cache(**kwargs) + + @deprecated("clear_redis_cache is deprecated; use clear_redis_cache_validated instead") + @typing_deprecated("clear_redis_cache is deprecated; use clear_redis_cache_validated instead") async def clear_redis_cache(self) -> Dict[str, str]: - """Clear Redis cache""" - return await self.secret_api.clear_redis_cache() + """Clear Redis secret cache. + + .. deprecated:: + Use clear_redis_cache_validated instead for type-safe validated responses. 
+ + Returns: + Dictionary with cache clear results + + Example: + ```python + await secret_client.clear_redis_cache() + ``` + """ + return await self._secret_api.clear_redis_cache() + + async def clear_redis_cache_validated(self, **kwargs) -> None: + """Clear Redis secret cache. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + # Clear Redis cache after updating secrets + await secret_client.clear_redis_cache_validated() + ``` + """ + await self._secret_api.clear_redis_cache(**kwargs) # Convenience Methods - async def list_secrets(self) -> List[str]: - """Alias for list_all_secret_names for backward compatibility""" - return await self.list_all_secret_names() + async def list_secrets(self, **kwargs) -> List[str]: + """List all secret names (alias for list_all_secret_names). + + Args: + **kwargs: Additional optional parameters to pass to the API + Returns: + List of secret key names + + Example: + ```python + secrets = await secret_client.list_secrets() + ``` + """ + return await self.list_all_secret_names(**kwargs) + + @deprecated("update_secret is deprecated; use update_secret_validated instead") + @typing_deprecated("update_secret is deprecated; use update_secret_validated instead") async def update_secret(self, key: str, secret: str) -> object: - """Alias for put_secret for consistency with other clients""" + """Update a secret value (alias for put_secret). + + .. deprecated:: + Use update_secret_validated instead for type-safe validated responses. + + Args: + key: Key of the secret to update + secret: New secret value + + Returns: + Raw response object from the API + + Example: + ```python + await secret_client.update_secret("api_key", "new_value") + ``` + """ return await self.put_secret(key, secret) - async def has_secret(self, key: str) -> bool: - """Alias for secret_exists for consistency""" + async def update_secret_validated(self, key: str, secret: str, **kwargs) -> None: + """Update a secret value (alias for put_secret_validated). + + Args: + key: Key of the secret to update + secret: New secret value + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + await secret_client.update_secret_validated("api_key", "new_value") + ``` + """ + await self.put_secret_validated(key=key, secret=secret, **kwargs) + + @deprecated("has_secret is deprecated; use has_secret_validated instead") + @typing_deprecated("has_secret is deprecated; use has_secret_validated instead") + async def has_secret(self, key: str) -> object: + """Check if a secret exists (alias for secret_exists). + + .. deprecated:: + Use has_secret_validated instead for type-safe validated responses. + + Args: + key: Key of the secret to check + + Returns: + Raw response object from the API + + Example: + ```python + await secret_client.has_secret("db_password") + ``` + """ return await self.secret_exists(key) + + async def has_secret_validated(self, key: str, **kwargs) -> bool: + """Check if a secret exists (alias for secret_exists_validated). 
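# Usage sketch (hypothetical helper): rotate a secret and then clear both caches, as
# the cache-clear docstrings above suggest doing after secret updates. Assumes
# `secret_client` is an OrkesSecretClient instance.
async def rotate_and_flush(secret_client, key: str, new_value: str) -> None:
    await secret_client.put_secret_validated(key, new_value)
    await secret_client.clear_local_cache_validated()
    await secret_client.clear_redis_cache_validated()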
+ + Args: + key: Key of the secret to check + **kwargs: Additional optional parameters to pass to the API + + Returns: + True if secret exists, False otherwise + + Example: + ```python + if await secret_client.has_secret_validated("db_password"): + print("Password is configured") + ``` + """ + return await self.secret_exists_validated(key=key, **kwargs) diff --git a/src/conductor/asyncio_client/orkes/orkes_task_client.py b/src/conductor/asyncio_client/orkes/orkes_task_client.py index a5f2bfe84..5a73cc648 100644 --- a/src/conductor/asyncio_client/orkes/orkes_task_client.py +++ b/src/conductor/asyncio_client/orkes/orkes_task_client.py @@ -1,6 +1,9 @@ from __future__ import annotations -from typing import Any, Dict, List, Optional +from typing import Any, Dict, List, Optional, cast + +from deprecated import deprecated +from typing_extensions import deprecated as typing_deprecated from conductor.asyncio_client.adapters import ApiClient from conductor.asyncio_client.adapters.models.poll_data_adapter import PollDataAdapter @@ -17,15 +20,82 @@ class OrkesTaskClient(OrkesBaseClient): def __init__(self, configuration: Configuration, api_client: ApiClient): + """Initialize the OrkesTaskClient with configuration and API client. + + Args: + configuration: Configuration object containing server settings and authentication + api_client: ApiClient instance for making API requests + + Example: + ```python + from conductor.asyncio_client.configuration.configuration import Configuration + from conductor.asyncio_client.adapters import ApiClient + + config = Configuration(server_api_url="http://localhost:8080/api") + api_client = ApiClient(configuration=config) + task_client = OrkesTaskClient(config, api_client) + ``` + """ super().__init__(configuration, api_client) # Task Polling Operations + @deprecated("poll_for_task is deprecated; use poll_task instead") + @typing_deprecated("poll_for_task is deprecated; use poll_task instead") async def poll_for_task( self, task_type: str, worker_id: Optional[str] = None, domain: Optional[str] = None ) -> Optional[TaskAdapter]: - """Poll for a single task of a certain type""" - return await self.task_api.poll(tasktype=task_type, workerid=worker_id, domain=domain) + """Poll for a single task of a certain type. + + .. deprecated:: + Use poll_task instead for consistent API interface. + + Args: + task_type: Type of task to poll for + worker_id: Optional worker ID for tracking + domain: Optional domain for task isolation + + Returns: + TaskAdapter instance if a task is available, None otherwise + + Example: + ```python + task = await task_client.poll_for_task("process_order", worker_id="worker-1") + ``` + """ + return await self._task_api.poll(tasktype=task_type, workerid=worker_id, domain=domain) + + async def poll_task( + self, + task_type: str, + worker_id: Optional[str] = None, + domain: Optional[str] = None, + **kwargs, + ) -> Optional[TaskAdapter]: + """Poll for a single task of a certain type. 
+ Args: + task_type: Type of task to poll for + worker_id: Optional worker ID for tracking + domain: Optional domain for task isolation + **kwargs: Additional optional parameters to pass to the API + + Returns: + TaskAdapter instance if a task is available, None otherwise + + Example: + ```python + task = await task_client.poll_task("process_order", worker_id="worker-1") + if task: + print(f"Got task: {task.task_id}") + # Process the task + ``` + """ + return await self._task_api.poll( + tasktype=task_type, workerid=worker_id, domain=domain, **kwargs + ) + + @deprecated("poll_for_task_batch is deprecated; use batch_poll_tasks instead") + @typing_deprecated("poll_for_task_batch is deprecated; use batch_poll_tasks instead") async def poll_for_task_batch( self, task_type: str, @@ -34,8 +104,27 @@ async def poll_for_task_batch( timeout: int = 100, domain: Optional[str] = None, ) -> List[TaskAdapter]: - """Poll for multiple tasks in batch""" - return await self.task_api.batch_poll( + """Poll for multiple tasks in batch. + + .. deprecated:: + Use batch_poll_tasks instead for consistent API interface. + + Args: + task_type: Type of task to poll for + worker_id: Optional worker ID for tracking + count: Number of tasks to poll for (default: 1) + timeout: Timeout in milliseconds (default: 100) + domain: Optional domain for task isolation + + Returns: + List of TaskAdapter instances + + Example: + ```python + tasks = await task_client.poll_for_task_batch("process_order", count=5) + ``` + """ + return await self._task_api.batch_poll( tasktype=task_type, workerid=worker_id, count=count, @@ -43,14 +132,93 @@ async def poll_for_task_batch( domain=domain, ) + async def batch_poll_tasks( + self, + task_type: str, + worker_id: Optional[str] = None, + count: int = 1, + timeout: int = 100, + domain: Optional[str] = None, + **kwargs, + ) -> List[TaskAdapter]: + """Poll for multiple tasks in batch. + + Efficiently retrieves multiple tasks in a single operation. + + Args: + task_type: Type of task to poll for + worker_id: Optional worker ID for tracking + count: Number of tasks to poll for (default: 1) + timeout: Timeout in milliseconds (default: 100) + domain: Optional domain for task isolation + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of TaskAdapter instances + + Example: + ```python + # Poll for up to 10 tasks + tasks = await task_client.batch_poll_tasks( + "process_order", + worker_id="worker-1", + count=10, + timeout=5000 + ) + print(f"Received {len(tasks)} tasks to process") + ``` + """ + return await self._task_api.batch_poll( + tasktype=task_type, + workerid=worker_id, + count=count, + timeout=timeout, + domain=domain, + **kwargs, + ) + # Task Operations - async def get_task(self, task_id: str) -> TaskAdapter: - """Get task by ID""" - return await self.task_api.get_task(task_id=task_id) + async def get_task(self, task_id: str, **kwargs) -> TaskAdapter: + """Get task by ID. 
- async def update_task(self, task_result: TaskResultAdapter) -> str: - """Update task with result""" - return await self.task_api.update_task(task_result=task_result) + Args: + task_id: Unique identifier for the task + **kwargs: Additional optional parameters to pass to the API + + Returns: + TaskAdapter instance containing task details + + Example: + ```python + task = await task_client.get_task("task-123") + print(f"Status: {task.status}, Output: {task.output_data}") + ``` + """ + return await self._task_api.get_task(task_id=task_id, **kwargs) + + async def update_task(self, task_result: TaskResultAdapter, **kwargs) -> str: + """Update task with result. + + Args: + task_result: Task result containing status, output data, and logs + **kwargs: Additional optional parameters to pass to the API + + Returns: + Task ID as string + + Example: + ```python + from conductor.asyncio_client.adapters.models.task_result_adapter import TaskResultAdapter + + result = TaskResultAdapter( + task_id="task-123", + status="COMPLETED", + output_data={"result": "success", "data": {...}} + ) + await task_client.update_task(result) + ``` + """ + return await self._task_api.update_task(task_result=task_result, **kwargs) async def update_task_by_ref_name( self, @@ -59,16 +227,42 @@ async def update_task_by_ref_name( status: str, output: Dict[str, Dict[str, Any]], worker_id: Optional[str] = None, + **kwargs, ) -> str: - """Update task by workflow ID and task reference name""" + """Update task by workflow ID and task reference name. + + Useful when you don't have the task ID but know the workflow and task reference. + + Args: + workflow_id: ID of the workflow containing the task + task_ref_name: Reference name of the task in the workflow + status: New task status (e.g., "COMPLETED", "FAILED") + output: Task output data + worker_id: Optional worker ID + **kwargs: Additional optional parameters to pass to the API + + Returns: + Task ID as string + + Example: + ```python + await task_client.update_task_by_ref_name( + workflow_id="workflow-123", + task_ref_name="process_order_ref", + status="COMPLETED", + output={"order_id": "12345", "status": "processed"} + ) + ``` + """ body = {"result": output} - return await self.task_api.update_task1( + return await self._task_api.update_task1( workflow_id=workflow_id, task_ref_name=task_ref_name, status=status, request_body=body, workerid=worker_id, + **kwargs, ) async def update_task_sync( @@ -78,27 +272,78 @@ async def update_task_sync( status: str, output: Dict[str, Any], worker_id: Optional[str] = None, + **kwargs, ) -> WorkflowAdapter: - """Update task synchronously by workflow ID and task reference name""" + """Update task synchronously by workflow ID and task reference name. + + Updates a task and waits for the workflow to process the update before returning. 
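# Usage sketch (hypothetical worker helper): drain one batch of tasks with
# batch_poll_tasks and report results with update_task. Assumes `task_client` is an
# OrkesTaskClient; the TaskResultAdapter fields mirror the update_task docstring
# example, and real workers may need to set additional result fields.
from conductor.asyncio_client.adapters.models.task_result_adapter import TaskResultAdapter

async def drain_queue_once(task_client, task_type: str, worker_id: str) -> int:
    tasks = await task_client.batch_poll_tasks(
        task_type, worker_id=worker_id, count=10, timeout=1000
    )
    for task in tasks:
        result = TaskResultAdapter(
            task_id=task.task_id,
            status="COMPLETED",
            output_data={"processed": True},
        )
        await task_client.update_task(result)
    return len(tasks)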
+ + Args: + workflow_id: ID of the workflow containing the task + task_ref_name: Reference name of the task in the workflow + status: New task status (e.g., "COMPLETED", "FAILED") + output: Task output data + worker_id: Optional worker ID + **kwargs: Additional optional parameters to pass to the API + + Returns: + WorkflowAdapter instance with updated workflow state + + Example: + ```python + workflow = await task_client.update_task_sync( + workflow_id="workflow-123", + task_ref_name="validate_order_ref", + status="COMPLETED", + output={"is_valid": True} + ) + print(f"Workflow status: {workflow.status}") + ``` + """ body = {"result": output} - return await self.task_api.update_task_sync( + return await self._task_api.update_task_sync( workflow_id=workflow_id, task_ref_name=task_ref_name, status=status, request_body=body, workerid=worker_id, + **kwargs, ) # Task Queue Operations - async def get_task_queue_sizes(self) -> Dict[str, int]: - """Get the size of all task queues""" - return await self.task_api.all() + async def get_task_queue_sizes(self, **kwargs) -> Dict[str, int]: + """Get the size of all task queues. - async def get_task_queue_sizes_verbose( - self, - ) -> Dict[str, Dict[str, Dict[str, int]]]: - """Get detailed information about all task queues""" - return await self.task_api.all_verbose() + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + Dictionary mapping task types to queue sizes + + Example: + ```python + queue_sizes = await task_client.get_task_queue_sizes() + for task_type, size in queue_sizes.items(): + print(f"{task_type}: {size} tasks pending") + ``` + """ + return await self._task_api.all(**kwargs) + + async def get_task_queue_sizes_verbose(self, **kwargs) -> Dict[str, Dict[str, Dict[str, int]]]: + """Get detailed information about all task queues. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + Nested dictionary with detailed queue information + + Example: + ```python + verbose_info = await task_client.get_task_queue_sizes_verbose() + ``` + """ + return await self._task_api.all_verbose(**kwargs) # Poll Data Operations async def get_all_poll_data( @@ -109,29 +354,136 @@ async def get_all_poll_data( queue_opt: Optional[str] = None, last_poll_time_size: Optional[int] = None, last_poll_time_opt: Optional[str] = None, + **kwargs, ) -> Dict[str, object]: - """Get the last poll data for all task types""" - return await self.task_api.get_all_poll_data( + """Get the last poll data for all task types. 
+ + Args: + worker_size: Worker size parameter + worker_opt: Worker option parameter + queue_size: Queue size parameter + queue_opt: Queue option parameter + last_poll_time_size: Last poll time size parameter + last_poll_time_opt: Last poll time option parameter + **kwargs: Additional optional parameters to pass to the API + + Returns: + Dictionary with poll data for all task types + + Example: + ```python + poll_data = await task_client.get_all_poll_data() + ``` + """ + return await self._task_api.get_all_poll_data( worker_size=worker_size, worker_opt=worker_opt, queue_size=queue_size, queue_opt=queue_opt, last_poll_time_size=last_poll_time_size, last_poll_time_opt=last_poll_time_opt, + **kwargs, ) + @deprecated("get_poll_data is deprecated; use get_task_poll_data instead") + @typing_deprecated("get_poll_data is deprecated; use get_task_poll_data instead") async def get_poll_data(self, task_type: str) -> List[PollDataAdapter]: - """Get the last poll data for a specific task type""" - return await self.task_api.get_poll_data(task_type=task_type) + """Get the last poll data for a specific task type. + + .. deprecated:: + Use get_task_poll_data instead for consistent API interface. + + Args: + task_type: Type of task + + Returns: + List of PollDataAdapter instances + + Example: + ```python + poll_data = await task_client.get_poll_data("process_order") + ``` + """ + return await self._task_api.get_poll_data(task_type=task_type) + + async def get_task_poll_data(self, task_type: str, **kwargs) -> List[PollDataAdapter]: + """Get the last poll data for a specific task type. + + Args: + task_type: Type of task + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of PollDataAdapter instances + + Example: + ```python + poll_data = await task_client.get_task_poll_data("process_order") + for data in poll_data: + print(f"Worker: {data.worker_id}, Last poll: {data.last_poll_time}") + ``` + """ + return await self._task_api.get_poll_data(task_type=task_type, **kwargs) # Task Logging Operations async def get_task_logs(self, task_id: str) -> List[TaskExecLogAdapter]: - """Get task execution logs""" - return await self.task_api.get_task_logs(task_id=task_id) + """Get task execution logs. + + Args: + task_id: Unique identifier for the task + + Returns: + List of TaskExecLogAdapter instances containing log entries + + Example: + ```python + logs = await task_client.get_task_logs("task-123") + for log in logs: + print(f"{log.created_time}: {log.log}") + ``` + """ + return await self._task_api.get_task_logs(task_id=task_id) + @deprecated("log_task is deprecated; use add_task_log instead") + @typing_deprecated("log_task is deprecated; use add_task_log instead") async def log_task(self, task_id: str, log_message: str) -> None: - """Log task execution details""" - await self.task_api.log(task_id=task_id, body=log_message) + """Log task execution details. + + .. deprecated:: + Use add_task_log instead for consistent API interface. + + Args: + task_id: Unique identifier for the task + log_message: Log message to add + + Returns: + None + + Example: + ```python + await task_client.log_task("task-123", "Processing order...") + ``` + """ + await self._task_api.log(task_id=task_id, body=log_message) + + async def add_task_log(self, task_id: str, log_message: str, **kwargs) -> None: + """Add a task log. 
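The log and poll-data helpers above combine naturally into a small diagnostics routine. A minimal sketch, assuming the same `task_client` instance plus placeholder `task_id`/`task_type` values; it relies only on the `add_task_log`, `get_task_logs`, and `get_task_poll_data` signatures introduced in this diff.

```python
async def diagnose_task(task_client, task_id: str, task_type: str) -> None:
    # Attach a progress marker to the task's execution log.
    await task_client.add_task_log(task_id, "Diagnostics: starting checks")

    # Read the log back; entries expose created_time and log per the docstrings.
    for entry in await task_client.get_task_logs(task_id):
        print(entry.created_time, entry.log)

    # Inspect which workers are polling this task type and when they last polled.
    for data in await task_client.get_task_poll_data(task_type):
        print(data.worker_id, data.last_poll_time)
```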
+ + Args: + task_id: Unique identifier for the task + log_message: Log message to add + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + await task_client.add_task_log("task-123", "Starting order processing") + await task_client.add_task_log("task-123", "Order validated successfully") + ``` + """ + await self._task_api.log(task_id=task_id, body=log_message, **kwargs) # Task Search Operations async def search_tasks( @@ -141,26 +493,105 @@ async def search_tasks( sort: Optional[str] = None, free_text: Optional[str] = None, query: Optional[str] = None, + **kwargs, ) -> SearchResultTaskSummaryAdapter: - """Search for tasks based on payload and other parameters + """Search for tasks based on payload and other parameters. Args: - start: Start index for pagination - size: Page size + start: Start index for pagination (default: 0) + size: Page size (default: 100) sort: Sort options as sort=:ASC|DESC e.g. sort=name&sort=workflowId:DESC free_text: Free text search - query: Query string + query: Structured query string + **kwargs: Additional optional parameters to pass to the API + + Returns: + SearchResultTaskSummaryAdapter with matching tasks + + Example: + ```python + # Search for failed tasks + results = await task_client.search_tasks( + query="status:FAILED", + size=50 + ) + print(f"Found {results.total_hits} failed tasks") + + # Free text search + results = await task_client.search_tasks(free_text="order processing") + ``` """ - return await self.task_api.search1( - start=start, size=size, sort=sort, free_text=free_text, query=query + return await self._task_api.search1( + start=start, size=size, sort=sort, free_text=free_text, query=query, **kwargs ) # Task Queue Management - async def requeue_pending_tasks(self, task_type: str) -> str: - """Requeue all pending tasks of a given task type""" - return await self.task_api.requeue_pending_task(task_type=task_type) + async def requeue_pending_tasks(self, task_type: str, **kwargs) -> str: + """Requeue all pending tasks of a given task type. + + Args: + task_type: Type of task to requeue + **kwargs: Additional optional parameters to pass to the API + + Returns: + Result message as string + + Example: + ```python + result = await task_client.requeue_pending_tasks("process_order") + print(result) + ``` + """ + return await self._task_api.requeue_pending_task(task_type=task_type, **kwargs) # Utility Methods + @deprecated("get_queue_size_for_task_type is deprecated; use get_queue_size_for_task instead") + @typing_deprecated( + "get_queue_size_for_task_type is deprecated; use get_queue_size_for_task instead" + ) async def get_queue_size_for_task_type(self, task_type: List[str]) -> Dict[str, int]: - """Get queue size for a specific task type""" - return await self.task_api.size(task_type=task_type) + """Get queue size for a specific task type. + + .. deprecated:: + Use get_queue_size_for_task instead for consistent API interface. + + Args: + task_type: List containing the task type name + + Returns: + Dictionary mapping task types to queue sizes + + Example: + ```python + sizes = await task_client.get_queue_size_for_task_type(["process_order"]) + ``` + """ + return await self._task_api.size(task_type=task_type) + + async def get_queue_size_for_task(self, task_type: List[str], **kwargs) -> int: + """Get queue size for a specific task type. 
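A small operational helper can be built from the search and queue methods above. This is a non-authoritative sketch that assumes a `task_client` and a hypothetical `process_order` task type, using only the `get_queue_size_for_task`, `search_tasks`, and `requeue_pending_tasks` signatures shown here.

```python
async def check_backlog(task_client, task_type: str, requeue_threshold: int = 100) -> None:
    # get_queue_size_for_task takes a list of task types and returns an int.
    pending = await task_client.get_queue_size_for_task([task_type])
    print(f"{task_type}: {pending} tasks pending")

    # Look at recently failed tasks via the structured query syntax.
    failed = await task_client.search_tasks(query="status:FAILED", size=10)
    print(f"{failed.total_hits} failed tasks found")

    # Requeue pending work if the backlog grows beyond the threshold.
    if pending > requeue_threshold:
        print(await task_client.requeue_pending_tasks(task_type))
```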
+ + Args: + task_type: List containing the task type name + **kwargs: Additional optional parameters to pass to the API + + Returns: + Queue size as integer + + Example: + ```python + size = await task_client.get_queue_size_for_task(["process_order"]) + print(f"Pending tasks: {size}") + ``` + """ + queue_sizes_by_task_type = await self._task_api.size(task_type=task_type, **kwargs) + + if isinstance(task_type, list) and task_type: + actual_task_type = task_type[0] + else: + actual_task_type = task_type + + queue_sizes_dict = cast(Dict[str, int], queue_sizes_by_task_type) + queue_size = queue_sizes_dict.get(actual_task_type, 0) + + return queue_size diff --git a/src/conductor/asyncio_client/orkes/orkes_workflow_client.py b/src/conductor/asyncio_client/orkes/orkes_workflow_client.py index b0f16f339..c8807c0fa 100644 --- a/src/conductor/asyncio_client/orkes/orkes_workflow_client.py +++ b/src/conductor/asyncio_client/orkes/orkes_workflow_client.py @@ -3,6 +3,9 @@ import uuid from typing import Any, Dict, List, Optional, cast +from deprecated import deprecated +from typing_extensions import deprecated as typing_deprecated + from conductor.asyncio_client.adapters import ApiClient from conductor.asyncio_client.adapters.models.correlation_ids_search_request_adapter import ( CorrelationIdsSearchRequestAdapter, @@ -19,6 +22,12 @@ from conductor.asyncio_client.adapters.models.start_workflow_request_adapter import ( StartWorkflowRequestAdapter, ) +from conductor.asyncio_client.adapters.models.task_list_search_result_summary_adapter import ( + TaskListSearchResultSummaryAdapter, +) +from conductor.asyncio_client.adapters.models.upgrade_workflow_request_adapter import ( + UpgradeWorkflowRequestAdapter, +) from conductor.asyncio_client.adapters.models.workflow_adapter import WorkflowAdapter from conductor.asyncio_client.adapters.models.workflow_run_adapter import WorkflowRunAdapter from conductor.asyncio_client.adapters.models.workflow_state_update_adapter import ( @@ -34,6 +43,22 @@ class OrkesWorkflowClient(OrkesBaseClient): def __init__(self, configuration: Configuration, api_client: ApiClient): + """Initialize the OrkesWorkflowClient with configuration and API client. + + Args: + configuration: Configuration object containing server settings and authentication + api_client: ApiClient instance for making API requests + + Example: + ```python + from conductor.asyncio_client.configuration.configuration import Configuration + from conductor.asyncio_client.adapters import ApiClient + + config = Configuration(server_api_url="http://localhost:8080/api") + api_client = ApiClient(configuration=config) + workflow_client = OrkesWorkflowClient(config, api_client) + ``` + """ super().__init__(configuration, api_client) # Core Workflow Execution Operations @@ -46,9 +71,43 @@ async def start_workflow_by_name( priority: Optional[int] = None, x_idempotency_key: Optional[str] = None, x_on_conflict: Optional[str] = None, + **kwargs, ) -> str: - """Start a workflow by name with input data""" - return await self.workflow_api.start_workflow1( + """Start a workflow by name with input data. + + Args: + name: Name of the workflow to start + input_data: Input data for the workflow as dictionary + version: Optional workflow version. 
If None, uses latest version + correlation_id: Optional correlation ID for tracking related workflows + priority: Optional priority level (0-99, higher is more priority) + x_idempotency_key: Optional idempotency key to prevent duplicate executions + x_on_conflict: Optional conflict resolution strategy + **kwargs: Additional optional parameters to pass to the API + + Returns: + Workflow ID as string + + Example: + ```python + # Start a simple workflow + workflow_id = await workflow_client.start_workflow_by_name( + "order_processing", + {"order_id": "12345", "customer_id": "cust-999"} + ) + print(f"Started workflow: {workflow_id}") + + # Start with priority and correlation + workflow_id = await workflow_client.start_workflow_by_name( + "urgent_order_processing", + {"order_id": "99999"}, + version=2, + priority=10, + correlation_id="batch-2024-01" + ) + ``` + """ + return await self._workflow_api.start_workflow1( name=name, request_body=input_data, version=version, @@ -56,11 +115,36 @@ async def start_workflow_by_name( priority=priority, x_idempotency_key=x_idempotency_key, x_on_conflict=x_on_conflict, + **kwargs, ) - async def start_workflow(self, start_workflow_request: StartWorkflowRequestAdapter) -> str: - """Start a workflow with StartWorkflowRequest""" - return await self.workflow_api.start_workflow(start_workflow_request) + async def start_workflow( + self, start_workflow_request: StartWorkflowRequestAdapter, **kwargs + ) -> str: + """Start a workflow with StartWorkflowRequest. + + Args: + start_workflow_request: Complete workflow start request with all parameters + **kwargs: Additional optional parameters to pass to the API + + Returns: + Workflow ID as string + + Example: + ```python + from conductor.asyncio_client.adapters.models.start_workflow_request_adapter import StartWorkflowRequestAdapter + + request = StartWorkflowRequestAdapter( + name="order_processing", + version=1, + input={"order_id": "12345"}, + correlation_id="batch-001", + priority=5 + ) + workflow_id = await workflow_client.start_workflow(request) + ``` + """ + return await self._workflow_api.start_workflow(start_workflow_request, **kwargs) async def execute_workflow( self, @@ -68,40 +152,131 @@ async def execute_workflow( request_id: str, wait_until_task_ref: Optional[str] = None, wait_for_seconds: Optional[int] = None, + **kwargs, ) -> WorkflowRunAdapter: - """Execute a workflow synchronously""" - return await self.workflow_api.execute_workflow( + """Execute a workflow synchronously and wait for completion or specific task. + + Args: + start_workflow_request: Workflow start request + request_id: Unique request ID for idempotency + wait_until_task_ref: Optional task reference to wait for. If None, waits for completion + wait_for_seconds: Maximum seconds to wait. 
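Putting the constructor and the start methods together gives a complete entry point. This is a minimal sketch that reuses the construction shown in the `__init__` docstring; the server URL, workflow name (`order_processing`), and input values are placeholders, and the import paths follow the file layout in this diff.

```python
import asyncio

from conductor.asyncio_client.adapters import ApiClient
from conductor.asyncio_client.configuration.configuration import Configuration
from conductor.asyncio_client.orkes.orkes_workflow_client import OrkesWorkflowClient


async def main() -> None:
    config = Configuration(server_api_url="http://localhost:8080/api")
    api_client = ApiClient(configuration=config)
    workflow_client = OrkesWorkflowClient(config, api_client)

    # Fire-and-forget start; returns the new workflow's ID.
    workflow_id = await workflow_client.start_workflow_by_name(
        "order_processing",
        {"order_id": "12345"},
        priority=5,
        correlation_id="batch-2024-01",
    )
    print(f"Started workflow: {workflow_id}")


if __name__ == "__main__":
    asyncio.run(main())
```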
If None, waits indefinitely + **kwargs: Additional optional parameters to pass to the API + + Returns: + WorkflowRunAdapter containing execution results + + Example: + ```python + from conductor.asyncio_client.adapters.models.start_workflow_request_adapter import StartWorkflowRequestAdapter + import uuid + + request = StartWorkflowRequestAdapter( + name="order_processing", + input={"order_id": "12345"} + ) + result = await workflow_client.execute_workflow( + request, + request_id=str(uuid.uuid4()), + wait_for_seconds=30 + ) + print(f"Status: {result.status}, Output: {result.output}") + ``` + """ + return await self._workflow_api.execute_workflow( name=start_workflow_request.name, version=start_workflow_request.version or 1, request_id=request_id, start_workflow_request=start_workflow_request, wait_until_task_ref=wait_until_task_ref, wait_for_seconds=wait_for_seconds, + **kwargs, ) # Workflow Control Operations - async def pause_workflow(self, workflow_id: str) -> None: - """Pause a workflow execution""" - await self.workflow_api.pause_workflow(workflow_id=workflow_id) + async def pause_workflow(self, workflow_id: str, **kwargs) -> None: + """Pause a workflow execution. + + Args: + workflow_id: ID of the workflow to pause + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + await workflow_client.pause_workflow("workflow-123") + ``` + """ + await self._workflow_api.pause_workflow(workflow_id=workflow_id, **kwargs) + + async def resume_workflow(self, workflow_id: str, **kwargs) -> None: + """Resume a paused workflow execution. + + Args: + workflow_id: ID of the workflow to resume + **kwargs: Additional optional parameters to pass to the API - async def resume_workflow(self, workflow_id: str) -> None: - """Resume a paused workflow execution""" - await self.workflow_api.resume_workflow(workflow_id=workflow_id) + Returns: + None + + Example: + ```python + await workflow_client.resume_workflow("workflow-123") + ``` + """ + await self._workflow_api.resume_workflow(workflow_id=workflow_id, **kwargs) async def restart_workflow( - self, workflow_id: str, use_latest_definitions: Optional[bool] = None + self, workflow_id: str, use_latest_definitions: Optional[bool] = None, **kwargs ) -> None: - """Restart a workflow execution""" - await self.workflow_api.restart( - workflow_id=workflow_id, use_latest_definitions=use_latest_definitions + """Restart a workflow execution from the beginning. + + Args: + workflow_id: ID of the workflow to restart + use_latest_definitions: If True, use latest workflow and task definitions + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + # Restart with latest definitions + await workflow_client.restart_workflow("workflow-123", use_latest_definitions=True) + ``` + """ + await self._workflow_api.restart( + workflow_id=workflow_id, use_latest_definitions=use_latest_definitions, **kwargs ) async def rerun_workflow( - self, workflow_id: str, rerun_workflow_request: RerunWorkflowRequestAdapter + self, workflow_id: str, rerun_workflow_request: RerunWorkflowRequestAdapter, **kwargs ) -> str: - """Rerun a workflow from a specific task""" - return await self.workflow_api.rerun( - workflow_id=workflow_id, rerun_workflow_request=rerun_workflow_request + """Rerun a workflow from a specific task. 
+ + Args: + workflow_id: ID of the workflow to rerun + rerun_workflow_request: Configuration for rerun including from which task + **kwargs: Additional optional parameters to pass to the API + + Returns: + New workflow ID as string + + Example: + ```python + from conductor.asyncio_client.adapters.models.rerun_workflow_request_adapter import RerunWorkflowRequestAdapter + + rerun_request = RerunWorkflowRequestAdapter( + re_run_from_task_id="task-456", + task_input={"retry_count": 1} + ) + new_workflow_id = await workflow_client.rerun_workflow("workflow-123", rerun_request) + ``` + """ + return await self._workflow_api.rerun( + workflow_id=workflow_id, rerun_workflow_request=rerun_workflow_request, **kwargs ) async def retry_workflow( @@ -109,12 +284,29 @@ async def retry_workflow( workflow_id: str, resume_subworkflow_tasks: Optional[bool] = None, retry_if_retried_by_parent: Optional[bool] = None, + **kwargs, ) -> None: - """Retry a failed workflow execution""" - await self.workflow_api.retry( + """Retry a failed workflow execution. + + Args: + workflow_id: ID of the workflow to retry + resume_subworkflow_tasks: If True, resume subworkflow tasks + retry_if_retried_by_parent: If True, retry even if parent already retried + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + await workflow_client.retry_workflow("workflow-123", resume_subworkflow_tasks=True) + ``` + """ + await self._workflow_api.retry( workflow_id=workflow_id, resume_subworkflow_tasks=resume_subworkflow_tasks, retry_if_retried_by_parent=retry_if_retried_by_parent, + **kwargs, ) async def terminate_workflow( @@ -122,19 +314,59 @@ async def terminate_workflow( workflow_id: str, reason: Optional[str] = None, trigger_failure_workflow: Optional[bool] = None, + **kwargs, ) -> None: - """Terminate a workflow execution""" - await self.workflow_api.terminate1( + """Terminate a workflow execution. + + Args: + workflow_id: ID of the workflow to terminate + reason: Optional reason for termination + trigger_failure_workflow: If True, trigger failure workflow if configured + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + await workflow_client.terminate_workflow( + "workflow-123", + reason="Cancelled by user request" + ) + ``` + """ + await self._workflow_api.terminate1( workflow_id=workflow_id, reason=reason, trigger_failure_workflow=trigger_failure_workflow, + **kwargs, ) async def delete_workflow( - self, workflow_id: str, archive_workflow: Optional[bool] = None + self, workflow_id: str, archive_workflow: Optional[bool] = None, **kwargs ) -> None: - """Delete a workflow execution""" - await self.workflow_api.delete1(workflow_id=workflow_id, archive_workflow=archive_workflow) + """Delete a workflow execution. 
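The control operations above cover the usual lifecycle interventions. A short illustrative sequence, assuming an existing `workflow_client` and a placeholder workflow ID; it exercises only the `pause_workflow`, `resume_workflow`, `retry_workflow`, and `terminate_workflow` signatures from this diff.

```python
async def intervene(workflow_client, workflow_id: str, cancel: bool = False) -> None:
    # Temporarily stop task scheduling for the workflow, then let it continue.
    await workflow_client.pause_workflow(workflow_id)
    await workflow_client.resume_workflow(workflow_id)

    if cancel:
        # Stop the execution entirely, recording why.
        await workflow_client.terminate_workflow(workflow_id, reason="Cancelled by operator")
    else:
        # Re-run failed tasks, including those inside sub-workflows.
        await workflow_client.retry_workflow(workflow_id, resume_subworkflow_tasks=True)
```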
+ + Args: + workflow_id: ID of the workflow to delete + archive_workflow: If True, archive instead of permanently deleting + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + # Permanently delete + await workflow_client.delete_workflow("workflow-123") + + # Archive workflow + await workflow_client.delete_workflow("workflow-123", archive_workflow=True) + ``` + """ + await self._workflow_api.delete1( + workflow_id=workflow_id, archive_workflow=archive_workflow, **kwargs + ) # Workflow Information Operations async def get_workflow( @@ -142,10 +374,28 @@ async def get_workflow( workflow_id: str, include_tasks: Optional[bool] = None, summarize: Optional[bool] = None, + **kwargs, ) -> WorkflowAdapter: - """Get workflow execution status and details""" - return await self.workflow_api.get_execution_status( - workflow_id=workflow_id, include_tasks=include_tasks, summarize=summarize + """Get workflow execution status and details. + + Args: + workflow_id: ID of the workflow + include_tasks: If True, include task details in the response + summarize: If True, return summarized information + **kwargs: Additional optional parameters to pass to the API + + Returns: + WorkflowAdapter instance with execution details + + Example: + ```python + workflow = await workflow_client.get_workflow("workflow-123", include_tasks=True) + print(f"Status: {workflow.status}") + print(f"Tasks: {len(workflow.tasks)}") + ``` + """ + return await self._workflow_api.get_execution_status( + workflow_id=workflow_id, include_tasks=include_tasks, summarize=summarize, **kwargs ) async def get_workflow_status_summary( @@ -153,12 +403,35 @@ async def get_workflow_status_summary( workflow_id: str, include_output: Optional[bool] = None, include_variables: Optional[bool] = None, + **kwargs, ) -> WorkflowStatusAdapter: - """Get workflow status summary""" - return await self.workflow_api.get_workflow_status_summary( + """Get workflow status summary. + + Args: + workflow_id: ID of the workflow + include_output: If True, include workflow output in the response + include_variables: If True, include workflow variables in the response + **kwargs: Additional optional parameters to pass to the API + + Returns: + WorkflowStatusAdapter with status information + + Example: + ```python + status = await workflow_client.get_workflow_status_summary( + "workflow-123", + include_output=True, + include_variables=True + ) + print(f"Status: {status.status}") + print(f"Output: {status.output}") + ``` + """ + return await self._workflow_api.get_workflow_status_summary( workflow_id=workflow_id, include_output=include_output, include_variables=include_variables, + **kwargs, ) async def get_running_workflows( @@ -167,10 +440,28 @@ async def get_running_workflows( version: Optional[int] = None, start_time: Optional[int] = None, end_time: Optional[int] = None, + **kwargs, ) -> List[str]: - """Get running workflow IDs""" - return await self.workflow_api.get_running_workflow( - name=name, version=version, start_time=start_time, end_time=end_time + """Get running workflow IDs. 
+ + Args: + name: Name of the workflow + version: Optional workflow version filter + start_time: Optional start time filter (epoch milliseconds) + end_time: Optional end time filter (epoch milliseconds) + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of running workflow IDs + + Example: + ```python + running = await workflow_client.get_running_workflows("order_processing") + print(f"{len(running)} workflows currently running") + ``` + """ + return await self._workflow_api.get_running_workflow( + name=name, version=version, start_time=start_time, end_time=end_time, **kwargs ) async def get_workflows_by_correlation_ids( @@ -179,17 +470,41 @@ async def get_workflows_by_correlation_ids( correlation_ids: List[str], include_completed: Optional[bool] = None, include_tasks: Optional[bool] = None, + **kwargs, ) -> Dict[str, List[WorkflowAdapter]]: - """Get workflows by correlation IDs""" + """Get workflows by correlation IDs. + + Args: + workflow_name: Name of the workflow + correlation_ids: List of correlation IDs to search for + include_completed: If True, include completed workflows + include_tasks: If True, include task details + **kwargs: Additional optional parameters to pass to the API + + Returns: + Dictionary mapping correlation IDs to lists of WorkflowAdapter instances + + Example: + ```python + workflows = await workflow_client.get_workflows_by_correlation_ids( + "order_processing", + ["batch-001", "batch-002"], + include_completed=True + ) + for corr_id, wfs in workflows.items(): + print(f"Correlation {corr_id}: {len(wfs)} workflows") + ``` + """ # Create correlation IDs search request search_request = CorrelationIdsSearchRequestAdapter( workflow_names=[workflow_name], correlation_ids=correlation_ids, ) - return await self.workflow_api.get_workflows1( + return await self._workflow_api.get_workflows1( correlation_ids_search_request=search_request, include_closed=include_completed, include_tasks=include_tasks, + **kwargs, ) async def get_workflows_by_correlation_ids_batch( @@ -197,10 +512,35 @@ async def get_workflows_by_correlation_ids_batch( batch_request: CorrelationIdsSearchRequestAdapter, include_completed: Optional[bool] = None, include_tasks: Optional[bool] = None, + **kwargs, ) -> Dict[str, List[WorkflowAdapter]]: - """Get workflows by correlation IDs in batch""" - return await self.workflow_api.get_workflows1( - batch_request, include_closed=include_completed, include_tasks=include_tasks + """Get workflows by correlation IDs in batch. 
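These read-only methods combine into a simple monitoring snapshot. The sketch below is illustrative, assumes a `workflow_client` and a placeholder workflow name, and calls only `get_running_workflows`, `get_workflow`, and `get_workflow_status_summary` as declared above.

```python
async def snapshot(workflow_client, workflow_name: str) -> None:
    running_ids = await workflow_client.get_running_workflows(workflow_name)
    print(f"{len(running_ids)} '{workflow_name}' workflows currently running")

    for workflow_id in running_ids[:5]:
        # Full execution object, including per-task details.
        workflow = await workflow_client.get_workflow(workflow_id, include_tasks=True)
        # Lightweight summary with output and variables included.
        status = await workflow_client.get_workflow_status_summary(
            workflow_id, include_output=True, include_variables=True
        )
        print(workflow_id, workflow.status, len(workflow.tasks or []), status.status)
```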
+ + Args: + batch_request: Batch request with workflow names and correlation IDs + include_completed: If True, include completed workflows + include_tasks: If True, include task details + **kwargs: Additional optional parameters to pass to the API + + Returns: + Dictionary mapping correlation IDs to lists of WorkflowAdapter instances + + Example: + ```python + from conductor.asyncio_client.adapters.models.correlation_ids_search_request_adapter import CorrelationIdsSearchRequestAdapter + + batch = CorrelationIdsSearchRequestAdapter( + workflow_names=["order_processing", "payment_processing"], + correlation_ids=["batch-001", "batch-002"] + ) + workflows = await workflow_client.get_workflows_by_correlation_ids_batch(batch) + ``` + """ + return await self._workflow_api.get_workflows1( + correlation_ids_search_request=batch_request, + include_closed=include_completed, + include_tasks=include_tasks, + **kwargs, ) # Workflow Search Operations @@ -212,15 +552,45 @@ async def search_workflows( free_text: Optional[str] = None, query: Optional[str] = None, skip_cache: Optional[bool] = None, + **kwargs, ) -> ScrollableSearchResultWorkflowSummaryAdapter: - """Search for workflows based on payload and other parameters""" - return await self.workflow_api.search( + """Search for workflows based on payload and other parameters. + + Args: + start: Start index for pagination + size: Number of results to return + sort: Sort specification + free_text: Free text search query + query: Structured query string (e.g., "status:FAILED") + skip_cache: If True, skip cache and query database directly + **kwargs: Additional optional parameters to pass to the API + + Returns: + ScrollableSearchResultWorkflowSummaryAdapter with matching workflows + + Example: + ```python + # Search for failed workflows + results = await workflow_client.search_workflows( + query="status:FAILED", + size=50 + ) + print(f"Found {results.total_hits} failed workflows") + + # Search by workflow name + results = await workflow_client.search_workflows( + query="workflowType:order_processing" + ) + ``` + """ + return await self._workflow_api.search( start=start, size=size, sort=sort, free_text=free_text, query=query, skip_cache=skip_cache, + **kwargs, ) # Task Operations @@ -229,12 +599,32 @@ async def skip_task_from_workflow( workflow_id: str, task_reference_name: str, skip_task_request: Optional[SkipTaskRequestAdapter] = None, + **kwargs, ) -> None: - """Skip a task in a workflow""" - await self.workflow_api.skip_task_from_workflow( + """Skip a task in a workflow. + + Args: + workflow_id: ID of the workflow + task_reference_name: Reference name of the task to skip + skip_task_request: Optional skip request with input/output data + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + await workflow_client.skip_task_from_workflow( + "workflow-123", + "manual_approval_task" + ) + ``` + """ + await self._workflow_api.skip_task_from_workflow( workflow_id=workflow_id, task_reference_name=task_reference_name, - skip_task_request=skip_task_request, + skip_task_request=skip_task_request, # type: ignore[arg-type] + **kwargs, ) async def jump_to_task( @@ -242,26 +632,66 @@ async def jump_to_task( workflow_id: str, task_reference_name: str, workflow_input: Optional[Dict[str, Any]] = None, + **kwargs, ) -> None: - """Jump to a specific task in a workflow""" - await self.workflow_api.jump_to_task( + """Jump to a specific task in a workflow. 
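The search and task-level operations above support targeted operator actions. An illustrative sketch under the usual assumptions (`workflow_client`, a placeholder workflow ID and task reference name), using only `search_workflows` and `skip_task_from_workflow` as defined here.

```python
async def skip_manual_approval(workflow_client, workflow_id: str) -> None:
    # Check how many workflows are currently running via the structured query syntax.
    results = await workflow_client.search_workflows(query="status:RUNNING", size=25)
    print(f"{results.total_hits} workflow(s) currently running")

    # Skip the named task; the optional SkipTaskRequestAdapter body is omitted here.
    await workflow_client.skip_task_from_workflow(workflow_id, "manual_approval_task")
```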
+ + Args: + workflow_id: ID of the workflow + task_reference_name: Reference name of the task to jump to + workflow_input: Optional updated workflow input + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + await workflow_client.jump_to_task( + "workflow-123", + "retry_payment_task", + workflow_input={"retry_count": 1} + ) + ``` + """ + await self._workflow_api.jump_to_task( workflow_id=workflow_id, task_reference_name=task_reference_name, request_body=workflow_input or {}, + **kwargs, ) # Workflow State Operations async def update_workflow_state( - self, workflow_id: str, workflow_state_update: WorkflowStateUpdateAdapter + self, workflow_id: str, workflow_state_update: WorkflowStateUpdateAdapter, **kwargs ) -> WorkflowAdapter: - """Update workflow state""" + """Update workflow state (variables, tasks, etc.). + + Args: + workflow_id: ID of the workflow + workflow_state_update: State update containing variables and task updates + **kwargs: Additional optional parameters to pass to the API + + Returns: + WorkflowAdapter with updated state + + Example: + ```python + from conductor.asyncio_client.adapters.models.workflow_state_update_adapter import WorkflowStateUpdateAdapter + + state_update = WorkflowStateUpdateAdapter( + variables={"retry_count": 2, "last_error": "timeout"} + ) + workflow = await workflow_client.update_workflow_state("workflow-123", state_update) + ``` + """ # Convert the adapter to dict for the API call if hasattr(workflow_state_update, "to_dict"): request_body: Dict[str, Any] = workflow_state_update.to_dict() else: request_body = cast(Dict[str, Any], workflow_state_update) - return await self.workflow_api.update_workflow_state( - workflow_id=workflow_id, request_body=request_body + return await self._workflow_api.update_workflow_state( + workflow_id=workflow_id, request_body=request_body, **kwargs ) async def update_workflow_and_task_state( @@ -271,28 +701,127 @@ async def update_workflow_and_task_state( request_id: str = str(uuid.uuid4()), wait_until_task_ref_name: Optional[str] = None, wait_for_seconds: Optional[int] = None, + **kwargs, ) -> WorkflowRunAdapter: - """Update workflow and task state""" - return await self.workflow_api.update_workflow_and_task_state( + """Update workflow and task state synchronously. 
+ + Args: + workflow_id: ID of the workflow + workflow_state_update: State update containing variables and task updates + request_id: Unique request ID (default: generated UUID) + wait_until_task_ref_name: Optional task to wait for + wait_for_seconds: Maximum seconds to wait + **kwargs: Additional optional parameters to pass to the API + + Returns: + WorkflowRunAdapter with execution results + + Example: + ```python + from conductor.asyncio_client.adapters.models.workflow_state_update_adapter import WorkflowStateUpdateAdapter + + state_update = WorkflowStateUpdateAdapter( + variables={"step": "payment_processing"} + ) + result = await workflow_client.update_workflow_and_task_state( + "workflow-123", + state_update, + wait_for_seconds=30 + ) + ``` + """ + return await self._workflow_api.update_workflow_and_task_state( workflow_id=workflow_id, request_id=request_id, workflow_state_update=workflow_state_update, wait_until_task_ref=wait_until_task_ref_name, wait_for_seconds=wait_for_seconds, + **kwargs, ) # Advanced Operations - async def test_workflow(self, test_request: WorkflowTestRequestAdapter) -> WorkflowAdapter: - """Test a workflow definition""" - return await self.workflow_api.test_workflow(workflow_test_request=test_request) + async def test_workflow( + self, test_request: WorkflowTestRequestAdapter, **kwargs + ) -> WorkflowAdapter: + """Test a workflow definition without actually executing it. + + Args: + test_request: Workflow test request with definition and input + **kwargs: Additional optional parameters to pass to the API + + Returns: + WorkflowAdapter with simulated execution results - async def reset_workflow(self, workflow_id: str) -> None: - """Reset a workflow execution""" - await self.workflow_api.reset_workflow(workflow_id=workflow_id) + Example: + ```python + from conductor.asyncio_client.adapters.models.workflow_test_request_adapter import WorkflowTestRequestAdapter + test_request = WorkflowTestRequestAdapter( + workflow_def=workflow_definition, + input={"test_data": "value"} + ) + result = await workflow_client.test_workflow(test_request) + print(f"Test result: {result.status}") + ``` + """ + return await self._workflow_api.test_workflow(workflow_test_request=test_request, **kwargs) + + async def reset_workflow(self, workflow_id: str, **kwargs) -> None: + """Reset a workflow execution to initial state. + + Args: + workflow_id: ID of the workflow to reset + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + await workflow_client.reset_workflow("workflow-123") + ``` + """ + await self._workflow_api.reset_workflow(workflow_id=workflow_id, **kwargs) + + @deprecated("decide_workflow is deprecated; use decide instead") + @typing_deprecated("decide_workflow is deprecated; use decide instead") async def decide_workflow(self, workflow_id: str) -> None: - """Trigger workflow decision processing""" - await self.workflow_api.decide(workflow_id=workflow_id) + """Trigger workflow decision processing. + + .. deprecated:: + Use decide instead for consistent API interface. + + Args: + workflow_id: ID of the workflow + + Returns: + None + + Example: + ```python + await workflow_client.decide_workflow("workflow-123") + ``` + """ + await self._workflow_api.decide(workflow_id=workflow_id) + + async def decide(self, workflow_id: str, **kwargs) -> None: + """Trigger workflow decision processing. + + Forces the workflow to re-evaluate and process pending tasks. 
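State updates and the `decide` trigger are often used together when repairing a stuck execution. A minimal sketch under the same assumptions (`workflow_client`, placeholder IDs and variable values), built from the `WorkflowStateUpdateAdapter` usage shown in the docstrings.

```python
from conductor.asyncio_client.adapters.models.workflow_state_update_adapter import (
    WorkflowStateUpdateAdapter,
)


async def repair(workflow_client, workflow_id: str) -> None:
    # Push updated workflow variables, then force a re-evaluation of pending tasks.
    state_update = WorkflowStateUpdateAdapter(variables={"retry_count": 2})
    await workflow_client.update_workflow_state(workflow_id, state_update)
    await workflow_client.decide(workflow_id)
```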
+ + Args: + workflow_id: ID of the workflow + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + await workflow_client.decide("workflow-123") + ``` + """ + await self._workflow_api.decide(workflow_id=workflow_id, **kwargs) # Convenience Methods (for backward compatibility) async def execute_workflow_with_return_strategy( @@ -301,13 +830,33 @@ async def execute_workflow_with_return_strategy( request_id: str, wait_until_task_ref: Optional[str] = None, wait_for_seconds: int = 30, + **kwargs, ) -> WorkflowRunAdapter: - """Execute a workflow synchronously - alias for execute_workflow""" + """Execute a workflow synchronously (alias for execute_workflow). + + Args: + start_workflow_request: Workflow start request + request_id: Unique request ID + wait_until_task_ref: Optional task to wait for + wait_for_seconds: Seconds to wait (default: 30) + **kwargs: Additional optional parameters to pass to the API + + Returns: + WorkflowRunAdapter with execution results + + Example: + ```python + result = await workflow_client.execute_workflow_with_return_strategy( + start_request, request_id="req-123", wait_for_seconds=60 + ) + ``` + """ return await self.execute_workflow( start_workflow_request=start_workflow_request, request_id=request_id, wait_until_task_ref=wait_until_task_ref, wait_for_seconds=wait_for_seconds, + **kwargs, ) async def get_by_correlation_ids( @@ -316,13 +865,33 @@ async def get_by_correlation_ids( correlation_ids: List[str], include_completed: bool = False, include_tasks: bool = False, + **kwargs, ) -> Dict[str, List[WorkflowAdapter]]: - """Alias for get_workflows_by_correlation_ids""" + """Get workflows by correlation IDs (alias). + + Args: + workflow_name: Name of the workflow + correlation_ids: List of correlation IDs + include_completed: If True, include completed workflows + include_tasks: If True, include task details + **kwargs: Additional optional parameters to pass to the API + + Returns: + Dictionary mapping correlation IDs to workflow lists + + Example: + ```python + workflows = await workflow_client.get_by_correlation_ids( + "order_processing", ["batch-001"] + ) + ``` + """ return await self.get_workflows_by_correlation_ids( workflow_name=workflow_name, correlation_ids=correlation_ids, include_completed=include_completed, include_tasks=include_tasks, + **kwargs, ) async def get_by_correlation_ids_in_batch( @@ -330,12 +899,29 @@ async def get_by_correlation_ids_in_batch( batch_request: CorrelationIdsSearchRequestAdapter, include_completed: bool = False, include_tasks: bool = False, + **kwargs, ) -> Dict[str, List[WorkflowAdapter]]: - """Alias for get_workflows_by_correlation_ids_batch""" + """Get workflows by correlation IDs in batch (alias). 
+ + Args: + batch_request: Batch correlation search request + include_completed: If True, include completed workflows + include_tasks: If True, include task details + **kwargs: Additional optional parameters to pass to the API + + Returns: + Dictionary mapping correlation IDs to workflow lists + + Example: + ```python + workflows = await workflow_client.get_by_correlation_ids_in_batch(batch_request) + ``` + """ return await self.get_workflows_by_correlation_ids_batch( batch_request=batch_request, include_completed=include_completed, include_tasks=include_tasks, + **kwargs, ) async def search( @@ -345,41 +931,105 @@ async def search( free_text: str = "*", query: Optional[str] = None, skip_cache: Optional[bool] = None, + **kwargs, ) -> ScrollableSearchResultWorkflowSummaryAdapter: - """Alias for search_workflows for backward compatibility""" + """Search for workflows (alias for search_workflows). + + Args: + start: Start index for pagination (default: 0) + size: Number of results (default: 100) + free_text: Free text search (default: "*") + query: Structured query string + skip_cache: If True, skip cache + **kwargs: Additional optional parameters to pass to the API + + Returns: + ScrollableSearchResultWorkflowSummaryAdapter with results + + Example: + ```python + results = await workflow_client.search(query="status:FAILED") + ``` + """ return await self.search_workflows( start=start, size=size, free_text=free_text, query=query, skip_cache=skip_cache, + **kwargs, ) async def remove_workflow( - self, workflow_id: str, archive_workflow: Optional[bool] = None + self, workflow_id: str, archive_workflow: Optional[bool] = None, **kwargs ) -> None: - """Alias for delete_workflow""" - await self.delete_workflow(workflow_id=workflow_id, archive_workflow=archive_workflow) + """Delete a workflow (alias for delete_workflow). + + Args: + workflow_id: ID of the workflow to delete + archive_workflow: If True, archive instead of delete + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + await workflow_client.remove_workflow("workflow-123") + ``` + """ + await self.delete_workflow( + workflow_id=workflow_id, archive_workflow=archive_workflow, **kwargs + ) async def update_variables( - self, workflow_id: str, variables: Optional[Dict[str, Any]] = None + self, workflow_id: str, variables: Optional[Dict[str, Any]] = None, **kwargs ) -> None: - """Update workflow variables - implemented via workflow state update""" + """Update workflow variables. + + Args: + workflow_id: ID of the workflow + variables: Dictionary of variables to update + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + await workflow_client.update_variables( + "workflow-123", + {"retry_count": 3, "last_attempt": "2024-01-01"} + ) + ``` + """ if variables: state_update = WorkflowStateUpdateAdapter() state_update.variables = variables await self.update_workflow_state( - workflow_id=workflow_id, workflow_state_update=state_update + workflow_id=workflow_id, workflow_state_update=state_update, **kwargs ) async def update_state( - self, - workflow_id: str, - update_request: WorkflowStateUpdateAdapter, + self, workflow_id: str, update_request: WorkflowStateUpdateAdapter, **kwargs ) -> WorkflowRunAdapter: - """Alias for update_workflow_state""" + """Update workflow state (alias for update_workflow_and_task_state). 
+ + Args: + workflow_id: ID of the workflow + update_request: State update request + **kwargs: Additional optional parameters to pass to the API + + Returns: + WorkflowRunAdapter with results + + Example: + ```python + result = await workflow_client.update_state("workflow-123", state_update) + ``` + """ return await self.update_workflow_and_task_state( - workflow_id=workflow_id, workflow_state_update=update_request + workflow_id=workflow_id, workflow_state_update=update_request, **kwargs ) async def get_workflow_status( @@ -387,10 +1037,256 @@ async def get_workflow_status( workflow_id: str, include_output: Optional[bool] = None, include_variables: Optional[bool] = None, + **kwargs, ) -> WorkflowStatusAdapter: - """Alias for get_workflow_status_summary""" + """Get workflow status (alias for get_workflow_status_summary). + + Args: + workflow_id: ID of the workflow + include_output: If True, include output + include_variables: If True, include variables + **kwargs: Additional optional parameters to pass to the API + + Returns: + WorkflowStatusAdapter with status + + Example: + ```python + status = await workflow_client.get_workflow_status("workflow-123") + ``` + """ return await self.get_workflow_status_summary( workflow_id=workflow_id, include_output=include_output, include_variables=include_variables, + **kwargs, + ) + + async def execute_workflow_as_api( + self, + name: str, + request_body: Dict[str, Dict[str, Any]], + version: Optional[int] = None, + request_id: Optional[str] = None, + wait_until_task_ref: Optional[str] = None, + wait_for_seconds: Optional[int] = None, + x_idempotency_key: Optional[str] = None, + x_on_conflict: Optional[str] = None, + **kwargs, + ) -> Dict[str, Any]: + """Execute a workflow as an API call (POST method). + + Args: + name: Name of the workflow + request_body: Request body with workflow input + version: Optional workflow version + request_id: Optional unique request ID + wait_until_task_ref: Optional task to wait for + wait_for_seconds: Maximum seconds to wait + x_idempotency_key: Optional idempotency key + x_on_conflict: Optional conflict resolution strategy + **kwargs: Additional optional parameters to pass to the API + + Returns: + Dictionary with workflow execution results + + Example: + ```python + result = await workflow_client.execute_workflow_as_api( + "order_processing", + {"input": {"order_id": "12345"}}, + wait_for_seconds=30 + ) + ``` + """ + return await self._workflow_api.execute_workflow_as_api( + name=name, + request_body=request_body, + version=version, + request_id=request_id, + wait_until_task_ref=wait_until_task_ref, + wait_for_seconds=wait_for_seconds, + x_idempotency_key=x_idempotency_key, + x_on_conflict=x_on_conflict, + **kwargs, + ) + + async def execute_workflow_as_get_api( + self, + name: str, + version: Optional[int] = None, + request_id: Optional[str] = None, + wait_until_task_ref: Optional[str] = None, + wait_for_seconds: Optional[int] = None, + x_idempotency_key: Optional[str] = None, + x_on_conflict: Optional[str] = None, + **kwargs, + ) -> Dict[str, Any]: + """Execute a workflow as a GET API call. 
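For request/response style invocation, `execute_workflow_as_api` wraps the POST execute endpoint. A hedged sketch, assuming a `workflow_client` and placeholder workflow/input names; the request body shape follows the docstring example above.

```python
async def run_and_wait(workflow_client, order_id: str) -> dict:
    # Executes the workflow and waits up to 30 seconds for a result.
    return await workflow_client.execute_workflow_as_api(
        "order_processing",
        {"input": {"order_id": order_id}},
        wait_for_seconds=30,
    )
```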
+ + Args: + name: Name of the workflow + version: Optional workflow version + request_id: Optional unique request ID + wait_until_task_ref: Optional task to wait for + wait_for_seconds: Maximum seconds to wait + x_idempotency_key: Optional idempotency key + x_on_conflict: Optional conflict resolution strategy + **kwargs: Additional optional parameters to pass to the API + + Returns: + Dictionary with workflow execution results + + Example: + ```python + result = await workflow_client.execute_workflow_as_get_api( + "simple_workflow", + version=1, + wait_for_seconds=30 + ) + ``` + """ + return await self._workflow_api.execute_workflow_as_get_api( + name=name, + version=version, + request_id=request_id, + wait_until_task_ref=wait_until_task_ref, + wait_for_seconds=wait_for_seconds, + x_idempotency_key=x_idempotency_key, + x_on_conflict=x_on_conflict, + **kwargs, + ) + + async def get_execution_status_task_list( + self, + workflow_id: str, + start: Optional[int] = None, + count: Optional[int] = None, + status: Optional[List[str]] = None, + **kwargs, + ) -> TaskListSearchResultSummaryAdapter: + """Get the execution status task list for a workflow. + + Args: + workflow_id: ID of the workflow + start: Start index for pagination + count: Number of tasks to return + status: Optional list of task statuses to filter by + **kwargs: Additional optional parameters to pass to the API + + Returns: + TaskListSearchResultSummaryAdapter with task information + + Example: + ```python + tasks = await workflow_client.get_execution_status_task_list( + "workflow-123", + status=["FAILED", "TIMED_OUT"] + ) + ``` + """ + return await self._workflow_api.get_execution_status_task_list( + workflow_id=workflow_id, start=start, count=count, status=status, **kwargs + ) + + async def get_workflows( + self, + name: str, + request_body: List[str], + include_closed: Optional[bool] = None, + include_tasks: Optional[bool] = None, + **kwargs, + ) -> Dict[str, List[WorkflowAdapter]]: + """Get workflows by workflow IDs. + + Args: + name: Name of the workflow + request_body: List of workflow IDs to retrieve + include_closed: If True, include closed/completed workflows + include_tasks: If True, include task details + **kwargs: Additional optional parameters to pass to the API + + Returns: + Dictionary mapping workflow IDs to workflow lists + + Example: + ```python + workflows = await workflow_client.get_workflows( + "order_processing", + ["wf-123", "wf-456"], + include_tasks=True + ) + ``` + """ + return await self._workflow_api.get_workflows( + name=name, + request_body=request_body, + include_closed=include_closed, + include_tasks=include_tasks, + **kwargs, + ) + + async def get_workflows_by_correlation_id( + self, + name: str, + correlation_id: str, + include_closed: Optional[bool] = None, + include_tasks: Optional[bool] = None, + **kwargs, + ) -> List[WorkflowAdapter]: + """Get workflows by single correlation ID. 
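The bulk-read helpers above lend themselves to a quick failure report. This sketch is illustrative, assumes a `workflow_client` plus placeholder workflow name and ID, and uses only the `get_execution_status_task_list` and `get_workflows` signatures shown in this diff.

```python
async def failed_tasks_report(workflow_client, workflow_id: str) -> None:
    # List only the failed or timed-out tasks of one execution.
    tasks = await workflow_client.get_execution_status_task_list(
        workflow_id, status=["FAILED", "TIMED_OUT"]
    )
    print(tasks)

    # Bulk-fetch executions of the same workflow definition by ID.
    workflows = await workflow_client.get_workflows(
        "order_processing", [workflow_id], include_tasks=True
    )
    print(f"Fetched {sum(len(v) for v in workflows.values())} workflow object(s)")
```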
+ + Args: + name: Name of the workflow + correlation_id: Correlation ID to search for + include_closed: If True, include closed/completed workflows + include_tasks: If True, include task details + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of WorkflowAdapter instances + + Example: + ```python + workflows = await workflow_client.get_workflows_by_correlation_id( + "order_processing", + "batch-001", + include_completed=True + ) + ``` + """ + return await self._workflow_api.get_workflows2( + name=name, + correlation_id=correlation_id, + include_closed=include_closed, + include_tasks=include_tasks, + **kwargs, + ) + + async def upgrade_running_workflow_to_version( + self, workflow_id: str, upgrade_workflow_request: UpgradeWorkflowRequestAdapter, **kwargs + ) -> None: + """Upgrade a running workflow to a new version. + + Args: + workflow_id: ID of the workflow to upgrade + upgrade_workflow_request: Upgrade request specifying target version + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.asyncio_client.adapters.models.upgrade_workflow_request_adapter import UpgradeWorkflowRequestAdapter + + upgrade_request = UpgradeWorkflowRequestAdapter(version=2) + await workflow_client.upgrade_running_workflow_to_version( + "workflow-123", + upgrade_request + ) + ``` + """ + await self._workflow_api.upgrade_running_workflow_to_version( + workflow_id=workflow_id, upgrade_workflow_request=upgrade_workflow_request, **kwargs ) diff --git a/src/conductor/asyncio_client/worker/worker.py b/src/conductor/asyncio_client/worker/worker.py index 8db00c837..5e02fcbdc 100644 --- a/src/conductor/asyncio_client/worker/worker.py +++ b/src/conductor/asyncio_client/worker/worker.py @@ -75,7 +75,7 @@ def execute(self, task: TaskAdapter) -> TaskResultAdapter: for input_name in params: typ = params[input_name].annotation default_value = params[input_name].default - if input_name in task.input_data: + if task.input_data is not None and input_name in task.input_data: if typ in utils.simple_types: task_input[input_name] = task.input_data[input_name] else: @@ -87,7 +87,7 @@ def execute(self, task: TaskAdapter) -> TaskResultAdapter: task_input[input_name] = default_value else: task_input[input_name] = None - task_output = self.execute_function(**task_input) + task_output = self.execute_function(**task_input) # type: ignore[call-arg] if isinstance(task_output, TaskResultAdapter): task_output.task_id = task.task_id @@ -120,7 +120,11 @@ def execute(self, task: TaskAdapter) -> TaskResultAdapter: if len(ne.args) > 0: task_result.reason_for_incompletion = ne.args[0] - if dataclasses.is_dataclass(type(task_result.output_data)): + if ( + task_result.output_data is not None + and dataclasses.is_dataclass(task_result.output_data) + and not isinstance(task_result.output_data, type) + ): task_output = dataclasses.asdict(task_result.output_data) task_result.output_data = task_output return task_result diff --git a/src/conductor/asyncio_client/workflow/task/dynamic_fork_task.py b/src/conductor/asyncio_client/workflow/task/dynamic_fork_task.py index f59e27ebd..357dd327a 100644 --- a/src/conductor/asyncio_client/workflow/task/dynamic_fork_task.py +++ b/src/conductor/asyncio_client/workflow/task/dynamic_fork_task.py @@ -25,7 +25,7 @@ def __init__( self.tasks_input_param_name = tasks_input_param_name self._join_task = deepcopy(join_task) if join_task else None - def to_workflow_task(self) -> List[WorkflowTaskAdapter]: + def 
to_workflow_task(self) -> List[WorkflowTaskAdapter]: # type: ignore[override] wf_task = super().to_workflow_task() wf_task.dynamic_fork_tasks_param = self.tasks_param wf_task.dynamic_fork_tasks_input_param_name = self.tasks_input_param_name diff --git a/src/conductor/asyncio_client/workflow/task/fork_task.py b/src/conductor/asyncio_client/workflow/task/fork_task.py index 43fb83111..30a1f1138 100644 --- a/src/conductor/asyncio_client/workflow/task/fork_task.py +++ b/src/conductor/asyncio_client/workflow/task/fork_task.py @@ -26,7 +26,7 @@ def __init__( self._forked_tasks = forked_tasks self._join_on = join_on - def to_workflow_task( + def to_workflow_task( # type: ignore[override] self, ) -> Union[WorkflowTaskAdapter, List[WorkflowTaskAdapter]]: workflow_task = super().to_workflow_task() diff --git a/src/conductor/asyncio_client/workflow/task/llm_tasks/llm_index_documents.py b/src/conductor/asyncio_client/workflow/task/llm_tasks/llm_index_documents.py index 264ef75bb..73ad8e715 100644 --- a/src/conductor/asyncio_client/workflow/task/llm_tasks/llm_index_documents.py +++ b/src/conductor/asyncio_client/workflow/task/llm_tasks/llm_index_documents.py @@ -63,7 +63,7 @@ def __init__( if doc_id is not None: optional_input_params.update({"docId": doc_id}) - input_params.update(optional_input_params) + input_params.update(optional_input_params) # type: ignore[arg-type] if task_name is None: task_name = "llm_index_document" diff --git a/src/conductor/asyncio_client/workflow/task/switch_task.py b/src/conductor/asyncio_client/workflow/task/switch_task.py index b64d621f2..56527b09f 100644 --- a/src/conductor/asyncio_client/workflow/task/switch_task.py +++ b/src/conductor/asyncio_client/workflow/task/switch_task.py @@ -31,7 +31,7 @@ def switch_case(self, case_name: str, tasks: List[TaskInterface]): def default_case(self, tasks: List[TaskInterface]): if isinstance(tasks, List): - self._default_case = deepcopy(tasks) + self._default_case = deepcopy(tasks) # type: ignore[assignment] else: self._default_case = [deepcopy(tasks)] return self @@ -43,6 +43,8 @@ def to_workflow_task(self) -> WorkflowTaskAdapter: workflow.expression = self._expression else: workflow.evaluator_type = EvaluatorType.VALUE_PARAM + if workflow.input_parameters is None: + workflow.input_parameters = {} workflow.input_parameters["switchCaseValue"] = self._expression workflow.expression = "switchCaseValue" workflow.decision_cases = {} diff --git a/src/conductor/asyncio_client/workflow/task/task.py b/src/conductor/asyncio_client/workflow/task/task.py index 163a5b389..84a83c2cb 100644 --- a/src/conductor/asyncio_client/workflow/task/task.py +++ b/src/conductor/asyncio_client/workflow/task/task.py @@ -127,7 +127,7 @@ def input_parameters(self, input_parameters: Dict[str, Any]) -> None: except AttributeError as err: raise ValueError(f"Invalid type: {type(input_parameters)}") from err - self._input_parameters: Dict[str, Any] = deepcopy(input_parameters) + self._input_parameters: Dict[str, Any] = deepcopy(input_parameters) # type: ignore[no-redef] def input_parameter(self, key: str, value: Any): if not isinstance(key, str): diff --git a/src/conductor/client/adapters/api/admin_resource_api_adapter.py b/src/conductor/client/adapters/api/admin_resource_api_adapter.py index 65b77122c..b75fc0814 100644 --- a/src/conductor/client/adapters/api/admin_resource_api_adapter.py +++ b/src/conductor/client/adapters/api/admin_resource_api_adapter.py @@ -1,4 +1,30 @@ +from typing import Dict, List + +from conductor.client.adapters.api_client_adapter import 
ApiClientAdapter from conductor.client.codegen.api.admin_resource_api import AdminResourceApi +from conductor.client.http.models.task import Task + + +class AdminResourceApiAdapter: + def __init__(self, api_client: ApiClientAdapter): + self._api = AdminResourceApi(api_client) + + def clear_task_execution_cache(self, task_def_name: str, **kwargs) -> None: + """Remove execution cached values for the task""" + return self._api.clear_task_execution_cache(task_def_name, **kwargs) + + def get_redis_usage(self, **kwargs) -> Dict[str, object]: + """Get the Redis usage""" + return self._api.get_redis_usage(**kwargs) + + def requeue_sweep(self, workflow_id: str, **kwargs) -> str: + """Queue up all the running workflows for sweep""" + return self._api.requeue_sweep(workflow_id, **kwargs) + def verify_and_repair_workflow_consistency(self, workflow_id: str, **kwargs) -> str: + """Verify and repair workflow consistency""" + return self._api.verify_and_repair_workflow_consistency(workflow_id, **kwargs) -class AdminResourceApiAdapter(AdminResourceApi): ... + def view(self, tasktype: str, **kwargs) -> List[Task]: + """View the task type""" + return self._api.view(tasktype, **kwargs) diff --git a/src/conductor/client/adapters/api/application_resource_api_adapter.py b/src/conductor/client/adapters/api/application_resource_api_adapter.py index 4e92b1bbc..5e5174e1c 100644 --- a/src/conductor/client/adapters/api/application_resource_api_adapter.py +++ b/src/conductor/client/adapters/api/application_resource_api_adapter.py @@ -1,73 +1,112 @@ from typing import List +from conductor.client.adapters.api_client_adapter import ApiClientAdapter from conductor.client.codegen.api.application_resource_api import ApplicationResourceApi +from conductor.client.http.models.create_or_update_application_request import ( + CreateOrUpdateApplicationRequest, +) from conductor.client.http.models.extended_conductor_application import ExtendedConductorApplication from conductor.client.orkes.models.metadata_tag import MetadataTag -class ApplicationResourceApiAdapter(ApplicationResourceApi): - def create_access_key(self, id: str, **kwargs): +class ApplicationResourceApiAdapter: + def __init__(self, api_client: ApiClientAdapter): + self._api = ApplicationResourceApi(api_client) + + def create_access_key(self, id: str, **kwargs) -> object: + """Create an access key for an application""" # Convert empty application id to None to prevent sending invalid data to server if not id: id = None - return super().create_access_key(id, **kwargs) + return self._api.create_access_key(id, **kwargs) - def add_role_to_application_user(self, application_id: str, role: str, **kwargs): + def add_role_to_application_user(self, application_id: str, role: str, **kwargs) -> object: + """Add a role to an application user""" # Convert empty application_id and role to None to prevent sending invalid data to server if not application_id: application_id = None if not role: role = None - return super().add_role_to_application_user(application_id, role, **kwargs) + return self._api.add_role_to_application_user(application_id, role, **kwargs) + + def create_application(self, body: CreateOrUpdateApplicationRequest, **kwargs) -> object: + """Create an application""" + return self._api.create_application(body, **kwargs) - def delete_access_key(self, application_id: str, key_id: str, **kwargs): + def delete_access_key(self, application_id: str, key_id: str, **kwargs) -> object: # Convert empty application_id and key_id to None to prevent sending invalid data to server if 
not application_id: application_id = None if not key_id: key_id = None - return super().delete_access_key(application_id, key_id, **kwargs) + return self._api.delete_access_key(application_id, key_id, **kwargs) - def remove_role_from_application_user(self, application_id: str, role: str, **kwargs): + def delete_application(self, id: str, **kwargs) -> object: + """Delete an application""" + return self._api.delete_application(id, **kwargs) + + def remove_role_from_application_user(self, application_id: str, role: str, **kwargs) -> object: + """Remove a role from an application user""" # Convert empty application_id and role to None to prevent sending invalid data to server if not application_id: application_id = None if not role: role = None - return super().remove_role_from_application_user(application_id, role, **kwargs) + return self._api.remove_role_from_application_user(application_id, role, **kwargs) - def get_app_by_access_key_id( - self, access_key_id: str, **kwargs - ) -> ExtendedConductorApplication: + def get_app_by_access_key_id(self, access_key_id: str, **kwargs) -> object: + """Get an application by access key id""" # Convert empty access_key_id to None to prevent sending invalid data to server if not access_key_id: access_key_id = None - return super().get_app_by_access_key_id(access_key_id, **kwargs) + return self._api.get_app_by_access_key_id(access_key_id, **kwargs) - def get_access_keys(self, id: str, **kwargs): + def get_access_keys(self, id: str, **kwargs) -> object: + """Get the access keys for an application""" # Convert empty application id to None to prevent sending invalid data to server if not id: id = None - return super().get_access_keys(id=id, **kwargs) + return self._api.get_access_keys(id=id, **kwargs) - def toggle_access_key_status(self, application_id: str, key_id: str, **kwargs): + def toggle_access_key_status(self, application_id: str, key_id: str, **kwargs) -> object: + """Toggle the status of an access key""" # Convert empty application_id and key_id to None to prevent sending invalid data to server if not application_id: application_id = None if not key_id: key_id = None - return super().toggle_access_key_status(application_id, key_id, **kwargs) + return self._api.toggle_access_key_status(application_id, key_id, **kwargs) def get_tags_for_application(self, application_id: str, **kwargs) -> List[MetadataTag]: + """Get the tags for an application""" # Convert empty application_id to None to prevent sending invalid data to server if not application_id: application_id = None - return super().get_tags_for_application(application_id, **kwargs) + return self._api.get_tags_for_application(application_id, **kwargs) def delete_tag_for_application(self, tag: List[MetadataTag], id: str, **kwargs) -> None: + """Delete a tag for an application""" # Convert empty application id and tag list to None to prevent sending invalid data to server if not id: id = None if not tag: tag = None - return super().delete_tag_for_application(tag, id, **kwargs) + return self._api.delete_tag_for_application(tag, id, **kwargs) + + def get_application(self, id: str, **kwargs) -> object: + """Get an application by id""" + return self._api.get_application(id, **kwargs) + + def list_applications(self, **kwargs) -> List[ExtendedConductorApplication]: + """List all applications""" + return self._api.list_applications(**kwargs) + + def put_tag_for_application(self, tag: List[MetadataTag], id: str, **kwargs) -> None: + """Put a tag for an application""" + return 
self._api.put_tag_for_application(tag, id, **kwargs) + + def update_application( + self, body: CreateOrUpdateApplicationRequest, id: str, **kwargs + ) -> object: + """Update an application""" + return self._api.update_application(body, id, **kwargs) diff --git a/src/conductor/client/adapters/api/authorization_resource_api_adapter.py b/src/conductor/client/adapters/api/authorization_resource_api_adapter.py index cdb35e40d..fd7963b32 100644 --- a/src/conductor/client/adapters/api/authorization_resource_api_adapter.py +++ b/src/conductor/client/adapters/api/authorization_resource_api_adapter.py @@ -1,4 +1,21 @@ +from conductor.client.adapters.api_client_adapter import ApiClientAdapter from conductor.client.codegen.api.authorization_resource_api import AuthorizationResourceApi +from conductor.client.http.models.authorization_request import AuthorizationRequest +from conductor.client.http.models.response import Response -class AuthorizationResourceApiAdapter(AuthorizationResourceApi): ... +class AuthorizationResourceApiAdapter: + def __init__(self, api_client: ApiClientAdapter): + self._api = AuthorizationResourceApi(api_client) + + def get_permissions(self, type: str, id: str, **kwargs) -> object: + """Get the access that have been granted over the given object""" + return self._api.get_permissions(type, id, **kwargs) + + def grant_permissions(self, body: AuthorizationRequest, **kwargs) -> Response: + """Grant permissions to the given object""" + return self._api.grant_permissions(body, **kwargs) + + def remove_permissions(self, body: AuthorizationRequest, **kwargs) -> Response: + """Remove permissions from the given object""" + return self._api.remove_permissions(body, **kwargs) diff --git a/src/conductor/client/adapters/api/environment_resource_api_adapter.py b/src/conductor/client/adapters/api/environment_resource_api_adapter.py index 1db93ef85..044e999cb 100644 --- a/src/conductor/client/adapters/api/environment_resource_api_adapter.py +++ b/src/conductor/client/adapters/api/environment_resource_api_adapter.py @@ -1,4 +1,39 @@ +from typing import List + +from conductor.client.adapters.api_client_adapter import ApiClientAdapter from conductor.client.codegen.api.environment_resource_api import EnvironmentResourceApi +from conductor.client.http.models.environment_variable import EnvironmentVariable +from conductor.client.http.models.tag import Tag + + +class EnvironmentResourceApiAdapter: + def __init__(self, api_client: ApiClientAdapter): + self._api = EnvironmentResourceApi(api_client) + + def create_or_update_env_variable(self, body: str, key: str, **kwargs) -> None: + """Create or update an environment variable""" + return self._api.create_or_update_env_variable(body, key, **kwargs) + + def delete_env_variable(self, key: str, **kwargs) -> str: + """Delete an environment variable""" + return self._api.delete_env_variable(key, **kwargs) + + def delete_tag_for_env_var(self, body: List[Tag], name: str, **kwargs) -> None: + """Delete a tag for an environment variable""" + return self._api.delete_tag_for_env_var(body, name, **kwargs) + + def get(self, key: str, **kwargs) -> str: + """Get an environment variable""" + return self._api.get(key, **kwargs) + + def get_all(self, **kwargs) -> List[EnvironmentVariable]: + """Get all environment variables""" + return self._api.get_all(**kwargs) + def get_tags_for_env_var(self, name: str, **kwargs) -> List[Tag]: + """Get tags for an environment variable""" + return self._api.get_tags_for_env_var(name, **kwargs) -class 
EnvironmentResourceApiAdapter(EnvironmentResourceApi): ... + def put_tag_for_env_var(self, body: List[Tag], name: str, **kwargs) -> None: + """Put a tag for an environment variable""" + return self._api.put_tag_for_env_var(body, name, **kwargs) diff --git a/src/conductor/client/adapters/api/event_execution_resource_api_adapter.py b/src/conductor/client/adapters/api/event_execution_resource_api_adapter.py index 9794c0cef..dbd010d10 100644 --- a/src/conductor/client/adapters/api/event_execution_resource_api_adapter.py +++ b/src/conductor/client/adapters/api/event_execution_resource_api_adapter.py @@ -1,4 +1,23 @@ +from typing import List + +from conductor.client.adapters.api_client_adapter import ApiClientAdapter from conductor.client.codegen.api.event_execution_resource_api import EventExecutionResourceApi +from conductor.client.http.models.extended_event_execution import ExtendedEventExecution +from conductor.client.http.models.search_result_handled_event_response import ( + SearchResultHandledEventResponse, +) + + +class EventExecutionResourceApiAdapter: + def __init__(self, api_client: ApiClientAdapter): + self._api = EventExecutionResourceApi(api_client) + def get_event_handlers_for_event1(self, **kwargs) -> SearchResultHandledEventResponse: + """Get all active event handlers for the last 24 hours""" + return self._api.get_event_handlers_for_event1(**kwargs) -class EventExecutionResourceApiAdapter(EventExecutionResourceApi): ... + def get_event_handlers_for_event2( + self, event: str, _from: str, **kwargs + ) -> List[ExtendedEventExecution]: + """Get event handlers for an event""" + return self._api.get_event_handlers_for_event2(event, _from, **kwargs) diff --git a/src/conductor/client/adapters/api/event_message_resource_api_adapter.py b/src/conductor/client/adapters/api/event_message_resource_api_adapter.py index e822e9ffa..5d4a54e0e 100644 --- a/src/conductor/client/adapters/api/event_message_resource_api_adapter.py +++ b/src/conductor/client/adapters/api/event_message_resource_api_adapter.py @@ -1,4 +1,21 @@ +from typing import List + +from conductor.client.adapters.api_client_adapter import ApiClientAdapter from conductor.client.codegen.api.event_message_resource_api import EventMessageResourceApi +from conductor.client.http.models.event_message import EventMessage +from conductor.client.http.models.search_result_handled_event_response import ( + SearchResultHandledEventResponse, +) + + +class EventMessageResourceApiAdapter: + def __init__(self, api_client: ApiClientAdapter): + self._api = EventMessageResourceApi(api_client) + def get_events(self, **kwargs) -> SearchResultHandledEventResponse: + """Get all event handlers with statistics""" + return self._api.get_events(**kwargs) -class EventMessageResourceApiAdapter(EventMessageResourceApi): ... 
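A minimal usage sketch of the delegation pattern these adapters follow, shown with the new EnvironmentResourceApiAdapter; the Configuration/ApiClientAdapter wiring and the server URL are illustrative assumptions, not part of this diff, and the calls require a running Conductor server.

from conductor.client.configuration.configuration import Configuration
from conductor.client.adapters.api_client_adapter import ApiClientAdapter
from conductor.client.adapters.api.environment_resource_api_adapter import EnvironmentResourceApiAdapter

# Assumed wiring: ApiClientAdapter is built from the existing Configuration class,
# mirroring how the generated ApiClient is constructed; adjust to the project's actual factory.
api_client = ApiClientAdapter(Configuration(server_api_url="http://localhost:8080/api"))
env_api = EnvironmentResourceApiAdapter(api_client)

env_api.create_or_update_env_variable("us-east-1", "DEFAULT_REGION")  # value (body) first, then key
print(env_api.get("DEFAULT_REGION"))                                  # -> "us-east-1"
print(env_api.get_all())                                              # -> List[EnvironmentVariable]
env_api.delete_env_variable("DEFAULT_REGION")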
+ def get_messages(self, event: str, **kwargs) -> List[EventMessage]: + """Get messages for an event""" + return self._api.get_messages(event, **kwargs) diff --git a/src/conductor/client/adapters/api/event_resource_api_adapter.py b/src/conductor/client/adapters/api/event_resource_api_adapter.py index 7e1d2e23a..d46066995 100644 --- a/src/conductor/client/adapters/api/event_resource_api_adapter.py +++ b/src/conductor/client/adapters/api/event_resource_api_adapter.py @@ -1,4 +1,77 @@ +from typing import Dict, List + +from conductor.client.adapters.api_client_adapter import ApiClientAdapter from conductor.client.codegen.api.event_resource_api import EventResourceApi +from conductor.client.http.models.connectivity_test_input import ConnectivityTestInput +from conductor.client.http.models.connectivity_test_result import ConnectivityTestResult +from conductor.client.http.models.event_handler import EventHandler +from conductor.client.http.models.tag import Tag + + +class EventResourceApiAdapter: + def __init__(self, api_client: ApiClientAdapter): + self._api = EventResourceApi(api_client) + + def add_event_handler(self, body: List[EventHandler], **kwargs) -> None: + """Add a new event handler""" + return self._api.add_event_handler(body, **kwargs) + + def delete_queue_config(self, queue_type: str, queue_name: str, **kwargs) -> None: + """Delete a queue config""" + return self._api.delete_queue_config(queue_type, queue_name, **kwargs) + + def delete_tag_for_event_handler(self, body: List[Tag], name: str, **kwargs) -> None: + """Delete a tag for an event handler""" + return self._api.delete_tag_for_event_handler(body, name, **kwargs) + + def get_event_handler_by_name(self, name: str, **kwargs) -> EventHandler: + """Get an event handler by name""" + return self._api.get_event_handler_by_name(name, **kwargs) + + def get_event_handlers(self, **kwargs) -> List[EventHandler]: + """Get all event handlers""" + return self._api.get_event_handlers(**kwargs) + + def get_event_handlers_for_event(self, event: str, **kwargs) -> List[EventHandler]: + """Get event handlers for an event""" + return self._api.get_event_handlers_for_event(event, **kwargs) + + def get_queue_config(self, queue_type: str, queue_name: str, **kwargs) -> Dict[str, object]: + """Get a queue config""" + return self._api.get_queue_config(queue_type, queue_name, **kwargs) + + def get_queue_names(self, **kwargs) -> Dict[str, str]: + """Get all queue names""" + return self._api.get_queue_names(**kwargs) + + def get_tags_for_event_handler(self, name: str, **kwargs) -> List[Tag]: + """Get tags for an event handler""" + return self._api.get_tags_for_event_handler(name, **kwargs) + + def handle_incoming_event(self, body: Dict[str, object], **kwargs) -> None: + """Handle an incoming event""" + return self._api.handle_incoming_event(body, **kwargs) + + def put_queue_config(self, body: str, queue_type: str, queue_name: str, **kwargs) -> None: + """Put a queue config""" + return self._api.put_queue_config(body, queue_type, queue_name, **kwargs) + + def put_tag_for_event_handler(self, body: List[Tag], name: str, **kwargs) -> None: + """Put a tag for an event handler""" + return self._api.put_tag_for_event_handler(body, name, **kwargs) + + def remove_event_handler_status(self, name: str, **kwargs) -> None: + """Remove the status of an event handler""" + return self._api.remove_event_handler_status(name, **kwargs) + + def test(self, **kwargs) -> EventHandler: + """Test the event handler""" + return self._api.test(**kwargs) + def test_connectivity(self, 
body: ConnectivityTestInput, **kwargs) -> ConnectivityTestResult: + """Test the connectivity of an event handler""" + return self._api.test_connectivity(body, **kwargs) -class EventResourceApiAdapter(EventResourceApi): ... + def update_event_handler(self, body: EventHandler, **kwargs) -> None: + """Update an event handler""" + return self._api.update_event_handler(body, **kwargs) diff --git a/src/conductor/client/adapters/api/group_resource_api_adapter.py b/src/conductor/client/adapters/api/group_resource_api_adapter.py index cf9f1f365..93c13d248 100644 --- a/src/conductor/client/adapters/api/group_resource_api_adapter.py +++ b/src/conductor/client/adapters/api/group_resource_api_adapter.py @@ -1,4 +1,53 @@ +from typing import List + +from conductor.client.adapters.api_client_adapter import ApiClientAdapter from conductor.client.codegen.api.group_resource_api import GroupResourceApi +from conductor.client.http.models.granted_access_response import GrantedAccessResponse +from conductor.client.http.models.group import Group +from conductor.client.http.models.response import Response +from conductor.client.http.models.upsert_group_request import UpsertGroupRequest + + +class GroupResourceApiAdapter: + def __init__(self, api_client: ApiClientAdapter): + self._api = GroupResourceApi(api_client) + + def add_user_to_group(self, group_id: str, user_id: str, **kwargs) -> object: + """Add a user to a group""" + return self._api.add_user_to_group(group_id, user_id, **kwargs) + + def add_users_to_group(self, body: List[str], group_id: str, **kwargs) -> None: + """Add users to a group""" + return self._api.add_users_to_group(body, group_id, **kwargs) + + def delete_group(self, id: str, **kwargs) -> Response: + """Delete a group""" + return self._api.delete_group(id, **kwargs) + + def get_granted_permissions1(self, group_id: str, **kwargs) -> GrantedAccessResponse: + """Get granted permissions for a group""" + return self._api.get_granted_permissions1(group_id, **kwargs) + + def get_group(self, id: str, **kwargs) -> object: + """Get a group""" + return self._api.get_group(id, **kwargs) + + def get_users_in_group(self, id: str, **kwargs) -> object: + """Get users in a group""" + return self._api.get_users_in_group(id, **kwargs) + + def list_groups(self, **kwargs) -> List[Group]: + """List groups""" + return self._api.list_groups(**kwargs) + + def remove_user_from_group(self, group_id: str, user_id: str, **kwargs) -> object: + """Remove a user from a group""" + return self._api.remove_user_from_group(group_id, user_id, **kwargs) + def remove_users_from_group(self, body: List[str], group_id: str, **kwargs) -> None: + """Remove users from a group""" + return self._api.remove_users_from_group(body, group_id, **kwargs) -class GroupResourceApiAdapter(GroupResourceApi): ... 
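A similar hedged sketch for the new EventResourceApiAdapter, limited to read-only calls; the client wiring is the same assumption as above, and the EventHandler `name` attribute is assumed from the existing http models.

from conductor.client.configuration.configuration import Configuration
from conductor.client.adapters.api_client_adapter import ApiClientAdapter
from conductor.client.adapters.api.event_resource_api_adapter import EventResourceApiAdapter

api_client = ApiClientAdapter(Configuration(server_api_url="http://localhost:8080/api"))  # assumed wiring, as above
event_api = EventResourceApiAdapter(api_client)

for handler in event_api.get_event_handlers():                  # List[EventHandler]
    # EventHandler is assumed to expose a `name` attribute, as in the existing http models.
    print(handler.name, event_api.get_tags_for_event_handler(handler.name))
print(event_api.get_queue_names())                              # Dict[str, str]: queue type -> queue name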
+ def upsert_group(self, body: UpsertGroupRequest, id: str, **kwargs) -> object: + """Upsert a group""" + return self._api.upsert_group(body, id, **kwargs) diff --git a/src/conductor/client/adapters/api/incoming_webhook_resource_api_adapter.py b/src/conductor/client/adapters/api/incoming_webhook_resource_api_adapter.py index 4874c8757..2e9cece22 100644 --- a/src/conductor/client/adapters/api/incoming_webhook_resource_api_adapter.py +++ b/src/conductor/client/adapters/api/incoming_webhook_resource_api_adapter.py @@ -1,4 +1,19 @@ +from typing import Dict + +from conductor.client.adapters.api_client_adapter import ApiClientAdapter from conductor.client.codegen.api.incoming_webhook_resource_api import IncomingWebhookResourceApi -class IncomingWebhookResourceApiAdapter(IncomingWebhookResourceApi): ... +class IncomingWebhookResourceApiAdapter: + def __init__(self, api_client: ApiClientAdapter): + self._api = IncomingWebhookResourceApi(api_client) + + def handle_webhook(self, id: str, request_params: Dict[str, object], **kwargs) -> str: + """Handle a webhook""" + return self._api.handle_webhook(id, request_params, **kwargs) + + def handle_webhook1( + self, body: str, request_params: Dict[str, object], id: str, **kwargs + ) -> str: + """Handle a webhook""" + return self._api.handle_webhook1(body, request_params, id, **kwargs) diff --git a/src/conductor/client/adapters/api/integration_resource_api_adapter.py b/src/conductor/client/adapters/api/integration_resource_api_adapter.py index 16d257e25..9624ffe57 100644 --- a/src/conductor/client/adapters/api/integration_resource_api_adapter.py +++ b/src/conductor/client/adapters/api/integration_resource_api_adapter.py @@ -1,4 +1,131 @@ +from typing import Dict, List + +from conductor.client.adapters.api_client_adapter import ApiClientAdapter from conductor.client.codegen.api.integration_resource_api import IntegrationResourceApi +from conductor.client.http.models.event_log import EventLog +from conductor.client.http.models.integration import Integration +from conductor.client.http.models.integration_api import IntegrationApi +from conductor.client.http.models.integration_api_update import IntegrationApiUpdate +from conductor.client.http.models.integration_def import IntegrationDef +from conductor.client.http.models.integration_update import IntegrationUpdate +from conductor.client.http.models.message_template import MessageTemplate +from conductor.client.http.models.tag import Tag + + +class IntegrationResourceApiAdapter: + def __init__(self, api_client: ApiClientAdapter): + self._api = IntegrationResourceApi(api_client) + + def associate_prompt_with_integration( + self, integration_provider: str, integration_name: str, prompt_name: str, **kwargs + ) -> None: + """Associate a Prompt Template with an Integration""" + return self._api.associate_prompt_with_integration( + integration_provider, integration_name, prompt_name, **kwargs + ) + + def delete_integration_api(self, name: str, integration_name: str, **kwargs) -> None: + """Delete an Integration""" + return self._api.delete_integration_api(name, integration_name, **kwargs) + + def delete_integration_provider(self, name: str, **kwargs) -> None: + """Delete an Integration Provider""" + return self._api.delete_integration_provider(name, **kwargs) + + def delete_tag_for_integration( + self, body: List[Tag], name: str, integration_name: str, **kwargs + ) -> None: + """Delete a Tag for an Integration""" + return self._api.delete_tag_for_integration(body, name, integration_name, **kwargs) + + def 
delete_tag_for_integration_provider(self, body: List[Tag], name: str, **kwargs) -> None: + """Delete a Tag for an Integration Provider""" + return self._api.delete_tag_for_integration_provider(body, name, **kwargs) + + def get_all_integrations(self, **kwargs) -> List[Integration]: + """Get all Integrations""" + return self._api.get_all_integrations(**kwargs) + + def get_integration_api(self, name: str, integration_name: str, **kwargs) -> IntegrationApi: + """Get an Integration API""" + return self._api.get_integration_api(name, integration_name, **kwargs) + + def get_integration_apis(self, name: str, **kwargs) -> List[IntegrationApi]: + """Get Integrations of an Integration Provider""" + return self._api.get_integration_apis(name, **kwargs) + + def get_integration_available_apis(self, name: str, **kwargs) -> List[str]: + """Get Integrations Available for an Integration Provider""" + return self._api.get_integration_available_apis(name, **kwargs) + + def get_integration_provider(self, name: str, **kwargs) -> Integration: + """Get an Integration Provider""" + return self._api.get_integration_provider(name, **kwargs) + + def get_integration_provider_defs(self, **kwargs) -> List[IntegrationDef]: + """Get Integration provider definitions""" + return self._api.get_integration_provider_defs(**kwargs) + + def get_integration_providers(self, **kwargs) -> List[Integration]: + """Get all Integration Providers""" + return self._api.get_integration_providers(**kwargs) + + def get_prompts_with_integration( + self, integration_provider: str, integration_name: str, **kwargs + ) -> List[MessageTemplate]: + """Get the list of prompt templates associated with an integration""" + return self._api.get_prompts_with_integration( + integration_provider, integration_name, **kwargs + ) + + def get_providers_and_integrations(self, **kwargs) -> List[str]: + """Get Integrations Providers and Integrations combo""" + return self._api.get_providers_and_integrations(**kwargs) + + def get_tags_for_integration(self, name: str, integration_name: str, **kwargs) -> List[Tag]: + """Get tags for an Integration""" + return self._api.get_tags_for_integration(name, integration_name, **kwargs) + + def get_tags_for_integration_provider(self, name: str, **kwargs) -> List[Tag]: + """Get tags for an Integration Provider""" + return self._api.get_tags_for_integration_provider(name, **kwargs) + + def get_token_usage_for_integration(self, name: str, integration_name: str, **kwargs) -> int: + """Get Token Usage by Integration""" + return self._api.get_token_usage_for_integration(name, integration_name, **kwargs) + + def get_token_usage_for_integration_provider(self, name: str, **kwargs) -> Dict[str, str]: + """Get Token Usage by Integration Provider""" + return self._api.get_token_usage_for_integration_provider(name, **kwargs) + + def put_tag_for_integration( + self, body: List[Tag], name: str, integration_name: str, **kwargs + ) -> None: + """Put a Tag for an Integration""" + return self._api.put_tag_for_integration(body, name, integration_name, **kwargs) + + def put_tag_for_integration_provider(self, body: List[Tag], name: str, **kwargs) -> None: + """Put a Tag for an Integration Provider""" + return self._api.put_tag_for_integration_provider(body, name, **kwargs) + + def record_event_stats(self, body: List[EventLog], type: str, **kwargs) -> None: + """Record Event Stats""" + return self._api.record_event_stats(body, type, **kwargs) + + def register_token_usage(self, body: int, name: str, integration_name: str, **kwargs) -> None: + 
"""Register Token Usage""" + return self._api.register_token_usage(body, name, integration_name, **kwargs) + + def save_all_integrations(self, body: List[Integration], **kwargs) -> None: + """Save all Integrations""" + return self._api.save_all_integrations(body, **kwargs) + def save_integration_api( + self, body: IntegrationApiUpdate, name: str, integration_name: str, **kwargs + ) -> None: + """Save an Integration API""" + return self._api.save_integration_api(body, name, integration_name, **kwargs) -class IntegrationResourceApiAdapter(IntegrationResourceApi): ... + def save_integration_provider(self, body: IntegrationUpdate, name: str, **kwargs) -> None: + """Save an Integration Provider""" + return self._api.save_integration_provider(body, name, **kwargs) diff --git a/src/conductor/client/adapters/api/limits_resource_api_adapter.py b/src/conductor/client/adapters/api/limits_resource_api_adapter.py index 7d10e2634..78f47a3ee 100644 --- a/src/conductor/client/adapters/api/limits_resource_api_adapter.py +++ b/src/conductor/client/adapters/api/limits_resource_api_adapter.py @@ -1,4 +1,13 @@ +from typing import Dict + +from conductor.client.adapters.api_client_adapter import ApiClientAdapter from conductor.client.codegen.api.limits_resource_api import LimitsResourceApi -class LimitsResourceApiAdapter(LimitsResourceApi): ... +class LimitsResourceApiAdapter: + def __init__(self, api_client: ApiClientAdapter): + self._api = LimitsResourceApi(api_client) + + def get2(self, **kwargs) -> Dict[str, object]: + """Get Limits""" + return self._api.get2(**kwargs) diff --git a/src/conductor/client/adapters/api/metadata_resource_api_adapter.py b/src/conductor/client/adapters/api/metadata_resource_api_adapter.py index 36ef9cc1d..cb6c56acb 100644 --- a/src/conductor/client/adapters/api/metadata_resource_api_adapter.py +++ b/src/conductor/client/adapters/api/metadata_resource_api_adapter.py @@ -1,4 +1,62 @@ +from typing import List + +from conductor.client.adapters.api_client_adapter import ApiClientAdapter from conductor.client.codegen.api.metadata_resource_api import MetadataResourceApi +from conductor.client.http.models.extended_task_def import ExtendedTaskDef +from conductor.client.http.models.extended_workflow_def import ExtendedWorkflowDef +from conductor.client.http.models.incoming_bpmn_file import IncomingBpmnFile +from conductor.client.http.models.task_def import TaskDef +from conductor.client.http.models.workflow_def import WorkflowDef + + +class MetadataResourceApiAdapter: + def __init__(self, api_client: ApiClientAdapter): + self._api = MetadataResourceApi(api_client) + + def create(self, body: ExtendedWorkflowDef, **kwargs) -> object: + """Create a new workflow definition""" + return self._api.create(body, **kwargs) + + def get1(self, name: str, **kwargs) -> WorkflowDef: + """Get a workflow definition""" + return self._api.get1(name, **kwargs) + + def get_task_def(self, tasktype: str, **kwargs) -> object: + """Get a task definition""" + return self._api.get_task_def(tasktype, **kwargs) + + def get_task_defs(self, **kwargs) -> List[TaskDef]: + """Get all task definitions""" + return self._api.get_task_defs(**kwargs) + + def get_workflow_defs(self, **kwargs) -> List[WorkflowDef]: + """Get all workflow definitions""" + return self._api.get_workflow_defs(**kwargs) + + def register_task_def(self, body: List[ExtendedTaskDef], **kwargs) -> object: + """Register a task definition""" + return self._api.register_task_def(body, **kwargs) + + def unregister_task_def(self, tasktype: str, **kwargs) -> None: 
+ """Unregister a task definition""" + return self._api.unregister_task_def(tasktype, **kwargs) + + def unregister_workflow_def(self, name: str, version: int, **kwargs) -> None: + """Unregister a workflow definition""" + return self._api.unregister_workflow_def(name, version, **kwargs) + + def update(self, body: List[ExtendedWorkflowDef], **kwargs) -> object: + """Update a workflow definition""" + return self._api.update(body, **kwargs) + + def update_task_def(self, body: ExtendedTaskDef, **kwargs) -> object: + """Update a task definition""" + return self._api.update_task_def(body, **kwargs) + def upload_bpmn_file(self, body: IncomingBpmnFile, **kwargs) -> List[ExtendedWorkflowDef]: + """Upload a BPMN file""" + return self._api.upload_bpmn_file(body, **kwargs) -class MetadataResourceApiAdapter(MetadataResourceApi): ... + def upload_workflows_and_tasks_definitions_to_s3(self, **kwargs) -> None: + """Upload all workflows and tasks definitions to Object storage if configured""" + return self._api.upload_workflows_and_tasks_definitions_to_s3(**kwargs) diff --git a/src/conductor/client/adapters/api/metrics_resource_api_adapter.py b/src/conductor/client/adapters/api/metrics_resource_api_adapter.py index d069c8d24..08fce219d 100644 --- a/src/conductor/client/adapters/api/metrics_resource_api_adapter.py +++ b/src/conductor/client/adapters/api/metrics_resource_api_adapter.py @@ -1,4 +1,16 @@ +from typing import Dict + +from conductor.client.adapters.api_client_adapter import ApiClientAdapter from conductor.client.codegen.api.metrics_resource_api import MetricsResourceApi +from conductor.client.http.models.json_node import JsonNode + +class MetricsResourceApiAdapter: + def __init__(self, api_client: ApiClientAdapter): + self._api = MetricsResourceApi(api_client) -class MetricsResourceApiAdapter(MetricsResourceApi): ... + def prometheus_task_metrics( + self, task_name: str, start: str, end: str, step: str, **kwargs + ) -> Dict[str, JsonNode]: + """Returns prometheus task metrics""" + return self._api.prometheus_task_metrics(task_name, start, end, step, **kwargs) diff --git a/src/conductor/client/adapters/api/metrics_token_resource_api_adapter.py b/src/conductor/client/adapters/api/metrics_token_resource_api_adapter.py index 52c347f38..d8467f413 100644 --- a/src/conductor/client/adapters/api/metrics_token_resource_api_adapter.py +++ b/src/conductor/client/adapters/api/metrics_token_resource_api_adapter.py @@ -1,4 +1,12 @@ +from conductor.client.adapters.api_client_adapter import ApiClientAdapter from conductor.client.codegen.api.metrics_token_resource_api import MetricsTokenResourceApi +from conductor.client.http.models.metrics_token import MetricsToken -class MetricsTokenResourceApiAdapter(MetricsTokenResourceApi): ... 
+class MetricsTokenResourceApiAdapter: + def __init__(self, api_client: ApiClientAdapter): + self._api = MetricsTokenResourceApi(api_client) + + def token(self, **kwargs) -> MetricsToken: + """Returns the metrics token""" + return self._api.token(**kwargs) diff --git a/src/conductor/client/adapters/api/prompt_resource_api_adapter.py b/src/conductor/client/adapters/api/prompt_resource_api_adapter.py index e32d4a4c1..da11e0e8e 100644 --- a/src/conductor/client/adapters/api/prompt_resource_api_adapter.py +++ b/src/conductor/client/adapters/api/prompt_resource_api_adapter.py @@ -1,4 +1,48 @@ +from typing import List + +from conductor.client.adapters.api_client_adapter import ApiClientAdapter from conductor.client.codegen.api.prompt_resource_api import PromptResourceApi +from conductor.client.http.models.message_template import MessageTemplate +from conductor.client.http.models.prompt_template_test_request import PromptTemplateTestRequest +from conductor.client.http.models.tag import Tag + + +class PromptResourceApiAdapter: + def __init__(self, api_client: ApiClientAdapter): + self._api = PromptResourceApi(api_client) + + def create_message_templates(self, body: List[MessageTemplate], **kwargs) -> None: + """Create message templates in bulk""" + return self._api.create_message_templates(body, **kwargs) + + def delete_message_template(self, name: str, **kwargs) -> None: + """Delete a message template""" + return self._api.delete_message_template(name, **kwargs) + + def delete_tag_for_prompt_template(self, body: List[Tag], name: str, **kwargs) -> None: + """Delete a tag for a prompt template""" + return self._api.delete_tag_for_prompt_template(body, name, **kwargs) + + def get_message_template(self, name: str, **kwargs) -> MessageTemplate: + """Get a message template""" + return self._api.get_message_template(name, **kwargs) + + def get_message_templates(self, **kwargs) -> List[MessageTemplate]: + """Get all message templates""" + return self._api.get_message_templates(**kwargs) + + def get_tags_for_prompt_template(self, name: str, **kwargs) -> List[Tag]: + """Get tags for a prompt template""" + return self._api.get_tags_for_prompt_template(name, **kwargs) + + def put_tag_for_prompt_template(self, body: List[Tag], name: str, **kwargs) -> None: + """Put a tag for a prompt template""" + return self._api.put_tag_for_prompt_template(body, name, **kwargs) + def save_message_template(self, body: str, description: str, name: str, **kwargs) -> None: + """Save a message template""" + return self._api.save_message_template(body, description, name, **kwargs) -class PromptResourceApiAdapter(PromptResourceApi): ... + def test_message_template(self, body: PromptTemplateTestRequest, **kwargs) -> str: + """Test a message template""" + return self._api.test_message_template(body, **kwargs) diff --git a/src/conductor/client/adapters/api/queue_admin_resource_api_adapter.py b/src/conductor/client/adapters/api/queue_admin_resource_api_adapter.py index 247b19493..336e316a5 100644 --- a/src/conductor/client/adapters/api/queue_admin_resource_api_adapter.py +++ b/src/conductor/client/adapters/api/queue_admin_resource_api_adapter.py @@ -1,4 +1,17 @@ +from typing import Dict + +from conductor.client.adapters.api_client_adapter import ApiClientAdapter from conductor.client.codegen.api.queue_admin_resource_api import QueueAdminResourceApi -class QueueAdminResourceApiAdapter(QueueAdminResourceApi): ... 
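A hedged sketch for the new PromptResourceApiAdapter; the template name, body, and description are placeholders, and the client wiring is assumed as above.

from conductor.client.configuration.configuration import Configuration
from conductor.client.adapters.api_client_adapter import ApiClientAdapter
from conductor.client.adapters.api.prompt_resource_api_adapter import PromptResourceApiAdapter

api_client = ApiClientAdapter(Configuration(server_api_url="http://localhost:8080/api"))  # assumed wiring, as above
prompt_api = PromptResourceApiAdapter(api_client)

# Argument order follows the adapter signature: template body, description, then name.
prompt_api.save_message_template("Summarize: ${text}", "Summarization prompt", "summarize_text")
print(prompt_api.get_message_template("summarize_text"))  # -> MessageTemplate
print(prompt_api.get_message_templates())                 # -> List[MessageTemplate]
prompt_api.delete_message_template("summarize_text")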
+class QueueAdminResourceApiAdapter: + def __init__(self, api_client: ApiClientAdapter): + self._api = QueueAdminResourceApi(api_client) + + def names(self, **kwargs) -> Dict[str, str]: + """Get Queue Names""" + return self._api.names(**kwargs) + + def size1(self, **kwargs) -> Dict[str, Dict[str, int]]: + """Get Queue Size""" + return self._api.size1(**kwargs) diff --git a/src/conductor/client/adapters/api/scheduler_bulk_resource_api_adapter.py b/src/conductor/client/adapters/api/scheduler_bulk_resource_api_adapter.py index dcffbef9c..5e86fb95e 100644 --- a/src/conductor/client/adapters/api/scheduler_bulk_resource_api_adapter.py +++ b/src/conductor/client/adapters/api/scheduler_bulk_resource_api_adapter.py @@ -1,4 +1,18 @@ +from typing import List + +from conductor.client.adapters.api_client_adapter import ApiClientAdapter from conductor.client.codegen.api.scheduler_bulk_resource_api import SchedulerBulkResourceApi +from conductor.client.http.models.bulk_response import BulkResponse + + +class SchedulerBulkResourceApiAdapter: + def __init__(self, api_client: ApiClientAdapter): + self._api = SchedulerBulkResourceApi(api_client) + def pause_schedules(self, body: List[str], **kwargs) -> BulkResponse: + """Pause the list of schedules""" + return self._api.pause_schedules(body, **kwargs) -class SchedulerBulkResourceApiAdapter(SchedulerBulkResourceApi): ... + def resume_schedules(self, body: List[str], **kwargs) -> BulkResponse: + """Resume the list of schedules""" + return self._api.resume_schedules(body, **kwargs) diff --git a/src/conductor/client/adapters/api/scheduler_resource_api_adapter.py b/src/conductor/client/adapters/api/scheduler_resource_api_adapter.py index f74499e51..9b34ca646 100644 --- a/src/conductor/client/adapters/api/scheduler_resource_api_adapter.py +++ b/src/conductor/client/adapters/api/scheduler_resource_api_adapter.py @@ -1,4 +1,76 @@ +from typing import Dict, List + +from conductor.client.adapters.api_client_adapter import ApiClientAdapter from conductor.client.codegen.api.scheduler_resource_api import SchedulerResourceApi +from conductor.client.http.models.save_schedule_request import SaveScheduleRequest +from conductor.client.http.models.search_result_workflow_schedule_execution_model import ( + SearchResultWorkflowScheduleExecutionModel, +) +from conductor.client.http.models.tag import Tag +from conductor.client.http.models.workflow_schedule import WorkflowSchedule +from conductor.client.http.models.workflow_schedule_model import WorkflowScheduleModel + + +class SchedulerResourceApiAdapter: + def __init__(self, api_client: ApiClientAdapter): + self._api = SchedulerResourceApi(api_client) + + def delete_schedule(self, name: str, **kwargs) -> object: + """Delete a schedule""" + return self._api.delete_schedule(name, **kwargs) + + def delete_tag_for_schedule(self, body: List[Tag], name: str, **kwargs) -> None: + """Delete a tag for a schedule""" + return self._api.delete_tag_for_schedule(body, name, **kwargs) + + def get_all_schedules(self, **kwargs) -> List[WorkflowScheduleModel]: + """Get all schedules""" + return self._api.get_all_schedules(**kwargs) + + def get_next_few_schedules(self, cron_expression: str, **kwargs) -> List[int]: + """Get the next few schedules""" + return self._api.get_next_few_schedules(cron_expression, **kwargs) + + def get_schedule(self, name: str, **kwargs) -> WorkflowSchedule: + """Get a schedule""" + return self._api.get_schedule(name, **kwargs) + + def get_schedules_by_tag(self, tag: str, **kwargs) -> List[WorkflowScheduleModel]: + """Get 
schedules by tag""" + return self._api.get_schedules_by_tag(tag, **kwargs) + + def get_tags_for_schedule(self, name: str, **kwargs) -> List[Tag]: + """Get tags for a schedule""" + return self._api.get_tags_for_schedule(name, **kwargs) + + def pause_all_schedules(self, **kwargs) -> Dict[str, object]: + """Pause all schedules""" + return self._api.pause_all_schedules(**kwargs) + + def pause_schedule(self, name: str, **kwargs) -> object: + """Pause a schedule""" + return self._api.pause_schedule(name, **kwargs) + + def put_tag_for_schedule(self, body: List[Tag], name: str, **kwargs) -> None: + """Put a tag for a schedule""" + return self._api.put_tag_for_schedule(body, name, **kwargs) + + def requeue_all_execution_records(self, **kwargs) -> Dict[str, object]: + """Requeue all execution records""" + return self._api.requeue_all_execution_records(**kwargs) + + def resume_all_schedules(self, **kwargs) -> Dict[str, object]: + """Resume all schedules""" + return self._api.resume_all_schedules(**kwargs) + + def resume_schedule(self, name: str, **kwargs) -> object: + """Resume a schedule""" + return self._api.resume_schedule(name, **kwargs) + def save_schedule(self, body: SaveScheduleRequest, **kwargs) -> object: + """Save a schedule""" + return self._api.save_schedule(body, **kwargs) -class SchedulerResourceApiAdapter(SchedulerResourceApi): ... + def search_v2(self, **kwargs) -> SearchResultWorkflowScheduleExecutionModel: + """Search for workflows based on payload and other parameters""" + return self._api.search_v2(**kwargs) diff --git a/src/conductor/client/adapters/api/schema_resource_api_adapter.py b/src/conductor/client/adapters/api/schema_resource_api_adapter.py index 7884c01df..4473e7d47 100644 --- a/src/conductor/client/adapters/api/schema_resource_api_adapter.py +++ b/src/conductor/client/adapters/api/schema_resource_api_adapter.py @@ -1,4 +1,30 @@ +from typing import List + +from conductor.client.adapters.api_client_adapter import ApiClientAdapter from conductor.client.codegen.api.schema_resource_api import SchemaResourceApi +from conductor.client.http.models.schema_def import SchemaDef + + +class SchemaResourceApiAdapter: + def __init__(self, api_client: ApiClientAdapter): + self._api = SchemaResourceApi(api_client) + + def delete_schema_by_name(self, name: str, **kwargs) -> None: + """Delete a schema by name""" + return self._api.delete_schema_by_name(name, **kwargs) + + def delete_schema_by_name_and_version(self, name: str, version: int, **kwargs) -> None: + """Delete a schema by name and version""" + return self._api.delete_schema_by_name_and_version(name, version, **kwargs) + + def get_all_schemas(self, **kwargs) -> List[SchemaDef]: + """Get all schemas""" + return self._api.get_all_schemas(**kwargs) + def get_schema_by_name_and_version(self, name: str, version: int, **kwargs) -> SchemaDef: + """Get a schema by name and version""" + return self._api.get_schema_by_name_and_version(name, version, **kwargs) -class SchemaResourceApiAdapter(SchemaResourceApi): ... 
+ def save(self, body: List[SchemaDef], **kwargs) -> None: + """Save a schema""" + return self._api.save(body, **kwargs) diff --git a/src/conductor/client/adapters/api/secret_resource_api_adapter.py b/src/conductor/client/adapters/api/secret_resource_api_adapter.py index 090a63a21..b826d406b 100644 --- a/src/conductor/client/adapters/api/secret_resource_api_adapter.py +++ b/src/conductor/client/adapters/api/secret_resource_api_adapter.py @@ -1,4 +1,61 @@ +from typing import Dict, List + +from conductor.client.adapters.api_client_adapter import ApiClientAdapter from conductor.client.codegen.api.secret_resource_api import SecretResourceApi +from conductor.client.http.models.extended_secret import ExtendedSecret +from conductor.client.http.models.tag import Tag + + +class SecretResourceApiAdapter: + def __init__(self, api_client: ApiClientAdapter): + self._api = SecretResourceApi(api_client) + + def clear_local_cache(self, **kwargs) -> Dict[str, str]: + """Clear local cache""" + return self._api.clear_local_cache(**kwargs) + + def clear_redis_cache(self, **kwargs) -> Dict[str, str]: + """Clear redis cache""" + return self._api.clear_redis_cache(**kwargs) + + def delete_secret(self, key: str, **kwargs) -> object: + """Delete a secret""" + return self._api.delete_secret(key, **kwargs) + + def delete_tag_for_secret(self, body: List[Tag], key: str, **kwargs) -> None: + """Delete a tag for a secret""" + return self._api.delete_tag_for_secret(body, key, **kwargs) + + def get_secret(self, key: str, **kwargs) -> str: + """Get a secret""" + return self._api.get_secret(key, **kwargs) + + def get_tags(self, key: str, **kwargs) -> List[Tag]: + """Get tags for a secret""" + return self._api.get_tags(key, **kwargs) + + def list_all_secret_names(self, **kwargs) -> List[str]: + """List all secret names""" + return self._api.list_all_secret_names(**kwargs) + + def list_secrets_that_user_can_grant_access_to(self, **kwargs) -> List[str]: + """List secrets that user can grant access to""" + return self._api.list_secrets_that_user_can_grant_access_to(**kwargs) + + def list_secrets_with_tags_that_user_can_grant_access_to( + self, **kwargs + ) -> List[ExtendedSecret]: + """List secrets with tags that user can grant access to""" + return self._api.list_secrets_with_tags_that_user_can_grant_access_to(**kwargs) + + def put_secret(self, body: str, key: str, **kwargs) -> object: + """Put a secret""" + return self._api.put_secret(body, key, **kwargs) + def put_tag_for_secret(self, body: List[Tag], key: str, **kwargs) -> None: + """Put a tag for a secret""" + return self._api.put_tag_for_secret(body, key, **kwargs) -class SecretResourceApiAdapter(SecretResourceApi): ... 
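A hedged sketch for the new SecretResourceApiAdapter; the secret key and value are placeholders, and the client wiring is assumed as above.

from conductor.client.configuration.configuration import Configuration
from conductor.client.adapters.api_client_adapter import ApiClientAdapter
from conductor.client.adapters.api.secret_resource_api_adapter import SecretResourceApiAdapter

api_client = ApiClientAdapter(Configuration(server_api_url="http://localhost:8080/api"))  # assumed wiring, as above
secret_api = SecretResourceApiAdapter(api_client)

secret_api.put_secret("super-secret-value", "payment_api_key")  # value (body) first, then key; names are hypothetical
print(secret_api.get_secret("payment_api_key"))                 # -> "super-secret-value"
print(secret_api.list_all_secret_names())                       # -> List[str]
secret_api.delete_secret("payment_api_key")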
+ def secret_exists(self, key: str, **kwargs) -> object: + """Check if a secret exists""" + return self._api.secret_exists(key, **kwargs) diff --git a/src/conductor/client/adapters/api/service_registry_resource_api_adapter.py b/src/conductor/client/adapters/api/service_registry_resource_api_adapter.py index b381f2b45..04ba53f24 100644 --- a/src/conductor/client/adapters/api/service_registry_resource_api_adapter.py +++ b/src/conductor/client/adapters/api/service_registry_resource_api_adapter.py @@ -1,4 +1,73 @@ +from typing import List + +from conductor.client.adapters.api_client_adapter import ApiClientAdapter from conductor.client.codegen.api.service_registry_resource_api import ServiceRegistryResourceApi +from conductor.client.http.models.circuit_breaker_transition_response import ( + CircuitBreakerTransitionResponse, +) +from conductor.client.http.models.proto_registry_entry import ProtoRegistryEntry +from conductor.client.http.models.service_method import ServiceMethod +from conductor.client.http.models.service_registry import ServiceRegistry + + +class ServiceRegistryResourceApiAdapter: + def __init__(self, api_client: ApiClientAdapter): + self._api = ServiceRegistryResourceApi(api_client) + + def get_registered_services(self, **kwargs) -> List[ServiceRegistry]: + """Get all registered services""" + return self._api.get_registered_services(**kwargs) + + def remove_service(self, name: str, **kwargs) -> None: + """Remove a service""" + return self._api.remove_service(name, **kwargs) + + def get_service(self, name: str, **kwargs) -> ServiceRegistry: + """Get a service""" + return self._api.get_service(name, **kwargs) + + def open_circuit_breaker(self, name: str, **kwargs) -> CircuitBreakerTransitionResponse: + """Open a circuit breaker""" + return self._api.open_circuit_breaker(name, **kwargs) + + def close_circuit_breaker(self, name: str, **kwargs) -> CircuitBreakerTransitionResponse: + """Close a circuit breaker""" + return self._api.close_circuit_breaker(name, **kwargs) + + def get_circuit_breaker_status(self, name: str, **kwargs) -> CircuitBreakerTransitionResponse: + """Get the status of a circuit breaker""" + return self._api.get_circuit_breaker_status(name, **kwargs) + + def add_or_update_service(self, body: ServiceRegistry, **kwargs) -> None: + """Add or update a service""" + return self._api.add_or_update_service(body, **kwargs) + + def add_or_update_method(self, registry_name: str, body: ServiceMethod, **kwargs) -> None: + """Add or update a method""" + return self._api.add_or_update_method(registry_name, body, **kwargs) + + def remove_method( + self, registry_name: str, service_name: str, method: str, method_type: str, **kwargs + ) -> None: + """Remove a method""" + return self._api.remove_method(registry_name, service_name, method, method_type, **kwargs) + + def get_proto_data(self, registry_name: str, filename: str, **kwargs) -> bytes: + """Get proto data""" + return self._api.get_proto_data(registry_name, filename, **kwargs) + + def set_proto_data(self, registry_name: str, filename: str, data: bytes, **kwargs) -> None: + """Set proto data""" + return self._api.set_proto_data(registry_name, filename, data, **kwargs) + + def delete_proto(self, registry_name: str, filename: str, **kwargs) -> None: + """Delete proto""" + return self._api.delete_proto(registry_name, filename, **kwargs) + def get_all_protos(self, registry_name: str, **kwargs) -> List[ProtoRegistryEntry]: + """Get all protos""" + return self._api.get_all_protos(registry_name, **kwargs) -class 
ServiceRegistryResourceApiAdapter(ServiceRegistryResourceApi): ... + def discover(self, name: str, **kwargs) -> List[ServiceMethod]: + """Discover a service""" + return self._api.discover(name, **kwargs) diff --git a/src/conductor/client/adapters/api/tags_api_adapter.py b/src/conductor/client/adapters/api/tags_api_adapter.py index 4684a8c01..212c05a72 100644 --- a/src/conductor/client/adapters/api/tags_api_adapter.py +++ b/src/conductor/client/adapters/api/tags_api_adapter.py @@ -1,4 +1,44 @@ +from conductor.client.adapters.api_client_adapter import ApiClientAdapter +from conductor.client.http.models.tag import Tag from conductor.client.orkes.api.tags_api import TagsApi -class TagsApiAdapter(TagsApi): ... +class TagsApiAdapter: + def __init__(self, api_client: ApiClientAdapter): + self._api = TagsApi(api_client) + + def add_task_tag(self, body: Tag, task_name: str, **kwargs) -> object: + """Add a task tag""" + return self._api.add_task_tag(body, task_name, **kwargs) + + def add_workflow_tag(self, body: Tag, name: str, **kwargs) -> object: + """Add a workflow tag""" + return self._api.add_workflow_tag(body, name, **kwargs) + + def delete_task_tag(self, body: Tag, task_name: str, **kwargs) -> object: + """Delete a task tag""" + return self._api.delete_task_tag(body, task_name, **kwargs) + + def delete_workflow_tag(self, body: Tag, name: str, **kwargs) -> object: + """Delete a workflow tag""" + return self._api.delete_workflow_tag(body, name, **kwargs) + + def get_tags1(self, **kwargs) -> object: + """List all tags""" + return self._api.get_tags1(**kwargs) + + def get_task_tags(self, task_name: str, **kwargs) -> object: + """Get task tags""" + return self._api.get_task_tags(task_name, **kwargs) + + def get_workflow_tags(self, name: str, **kwargs) -> object: + """Get workflow tags""" + return self._api.get_workflow_tags(name, **kwargs) + + def set_task_tags(self, body: object, task_name: object, **kwargs) -> object: + """Set task tags""" + return self._api.set_task_tags(body, task_name, **kwargs) + + def set_workflow_tags(self, body: object, name: str, **kwargs) -> object: + """Set workflow tags""" + return self._api.set_workflow_tags(body, name, **kwargs) diff --git a/src/conductor/client/adapters/api/task_resource_api_adapter.py b/src/conductor/client/adapters/api/task_resource_api_adapter.py index e60bfc271..34a2a2500 100644 --- a/src/conductor/client/adapters/api/task_resource_api_adapter.py +++ b/src/conductor/client/adapters/api/task_resource_api_adapter.py @@ -1,4 +1,97 @@ +from typing import Dict, List + +from conductor.client.adapters.api_client_adapter import ApiClientAdapter from conductor.client.codegen.api.task_resource_api import TaskResourceApi +from conductor.client.http.models.poll_data import PollData +from conductor.client.http.models.search_result_task import SearchResultTask +from conductor.client.http.models.search_result_task_summary import SearchResultTaskSummary +from conductor.client.http.models.signal_response import SignalResponse +from conductor.client.http.models.task import Task +from conductor.client.http.models.task_exec_log import TaskExecLog +from conductor.client.http.models.task_result import TaskResult +from conductor.client.http.models.workflow import Workflow + + +class TaskResourceApiAdapter: + def __init__(self, api_client: ApiClientAdapter): + self._api = TaskResourceApi(api_client) + + def all(self, **kwargs) -> Dict[str, int]: + """Get the details about each queue""" + return self._api.all(**kwargs) + + def all_verbose(self, **kwargs) -> Dict[str, 
Dict[str, Dict[str, int]]]: + """Get the details about each queue with verbose information""" + return self._api.all_verbose(**kwargs) + + def batch_poll(self, tasktype: str, **kwargs) -> List[Task]: + """Batch poll for a task of a certain type""" + return self._api.batch_poll(tasktype, **kwargs) + + def get_all_poll_data(self, **kwargs) -> Dict[str, object]: + """Get the details about all poll data""" + return self._api.get_all_poll_data(**kwargs) + + def get_poll_data(self, task_type: str, **kwargs) -> List[PollData]: + """Get the details about poll data for a task type""" + return self._api.get_poll_data(task_type, **kwargs) + + def get_task(self, task_id: str, **kwargs) -> Task: + """Get a task by its ID""" + return self._api.get_task(task_id, **kwargs) + + def get_task_logs(self, task_id: str, **kwargs) -> List[TaskExecLog]: + """Get the logs for a task""" + return self._api.get_task_logs(task_id, **kwargs) + + def log(self, body: str, task_id: str, **kwargs) -> None: + """Log a message for a task""" + return self._api.log(body, task_id, **kwargs) + + def poll(self, tasktype: str, **kwargs) -> Task: + """Poll for a task of a certain type""" + return self._api.poll(tasktype, **kwargs) + + def requeue_pending_task(self, task_type: str, **kwargs) -> str: + """Requeue a pending task""" + return self._api.requeue_pending_task(task_type, **kwargs) + + def search1(self, **kwargs) -> SearchResultTaskSummary: + """Search for tasks""" + return self._api.search1(**kwargs) + + def search_v21(self, **kwargs) -> SearchResultTask: + """Search for tasks""" + return self._api.search_v21(**kwargs) + + def size(self, **kwargs) -> Dict[str, int]: + """Get the size of a task type""" + return self._api.size(**kwargs) + + def update_task(self, body: TaskResult, **kwargs) -> str: + """Update a task""" + return self._api.update_task(body, **kwargs) + + def update_task1( + self, body: Dict[str, object], workflow_id: str, task_ref_name: str, status: str, **kwargs + ) -> str: + """Update a task""" + return self._api.update_task1(body, workflow_id, task_ref_name, status, **kwargs) + + def update_task_sync( + self, body: Dict[str, object], workflow_id: str, task_ref_name: str, status: str, **kwargs + ) -> Workflow: + """Update a task synchronously""" + return self._api.update_task_sync(body, workflow_id, task_ref_name, status, **kwargs) + def signal_workflow_task_async( + self, workflow_id: str, status: str, body: Dict[str, object], **kwargs + ) -> None: + """Signal a workflow task asynchronously""" + return self._api.signal_workflow_task_async(workflow_id, status, body, **kwargs) -class TaskResourceApiAdapter(TaskResourceApi): ... 
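A hedged sketch of a worker-style poll/update round trip with the new TaskResourceApiAdapter; the task type is hypothetical, the Task attribute names are assumed from the existing http models, and the client wiring is assumed as above.

from conductor.client.configuration.configuration import Configuration
from conductor.client.adapters.api_client_adapter import ApiClientAdapter
from conductor.client.adapters.api.task_resource_api_adapter import TaskResourceApiAdapter

api_client = ApiClientAdapter(Configuration(server_api_url="http://localhost:8080/api"))  # assumed wiring, as above
task_api = TaskResourceApiAdapter(api_client)

task = task_api.poll("image_resize")  # "image_resize" is a hypothetical task type; may be None when the queue is empty
if task is not None:
    # Task is assumed to expose workflow_instance_id / reference_task_name, as in the existing http models.
    task_api.update_task1(
        {"outcome": "ok"},             # task output payload
        task.workflow_instance_id,
        task.reference_task_name,
        "COMPLETED",
    )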
+ def signal_workflow_task_sync( + self, workflow_id: str, status: str, body: Dict[str, object], **kwargs + ) -> SignalResponse: + """Signal a workflow task synchronously""" + return self._api.signal_workflow_task_sync(workflow_id, status, body, **kwargs) diff --git a/src/conductor/client/adapters/api/token_resource_api_adapter.py b/src/conductor/client/adapters/api/token_resource_api_adapter.py index 5a789cabb..c5d1dc68d 100644 --- a/src/conductor/client/adapters/api/token_resource_api_adapter.py +++ b/src/conductor/client/adapters/api/token_resource_api_adapter.py @@ -1,4 +1,17 @@ +from conductor.client.adapters.api_client_adapter import ApiClientAdapter from conductor.client.codegen.api.token_resource_api import TokenResourceApi +from conductor.client.http.models.generate_token_request import GenerateTokenRequest +from conductor.client.http.models.response import Response -class TokenResourceApiAdapter(TokenResourceApi): ... +class TokenResourceApiAdapter: + def __init__(self, api_client: ApiClientAdapter): + self._api = TokenResourceApi(api_client) + + def generate_token(self, body: GenerateTokenRequest, **kwargs) -> Response: + """Generate a JWT token""" + return self._api.generate_token(body, **kwargs) + + def get_user_info(self, **kwargs) -> object: + """Get user information""" + return self._api.get_user_info(**kwargs) diff --git a/src/conductor/client/adapters/api/user_resource_api_adapter.py b/src/conductor/client/adapters/api/user_resource_api_adapter.py index 5c07baf91..5c56bc127 100644 --- a/src/conductor/client/adapters/api/user_resource_api_adapter.py +++ b/src/conductor/client/adapters/api/user_resource_api_adapter.py @@ -1,27 +1,48 @@ +from typing import List + +from conductor.client.adapters.api_client_adapter import ApiClientAdapter from conductor.client.codegen.api.user_resource_api import UserResourceApi +from conductor.client.http.models.conductor_user import ConductorUser +from conductor.client.http.models.response import Response +from conductor.client.http.models.upsert_user_request import UpsertUserRequest + +class UserResourceApiAdapter: + def __init__(self, api_client: ApiClientAdapter): + self._api = UserResourceApi(api_client) -class UserResourceApiAdapter(UserResourceApi): - def get_granted_permissions(self, user_id, **kwargs): + def check_permissions(self, user_id: str, type: str, id: str, **kwargs) -> object: + """Check permissions for a user""" + return self._api.check_permissions(user_id, type, id, **kwargs) + + def get_granted_permissions(self, user_id: str, **kwargs) -> object: + """Get the permissions this user has over workflows and tasks""" # Convert empty user_id to None to prevent sending invalid data to server if not user_id: user_id = None - return super().get_granted_permissions(user_id=user_id, **kwargs) + return self._api.get_granted_permissions(user_id=user_id, **kwargs) - def get_user(self, id, **kwargs): + def get_user(self, id: str, **kwargs) -> object: + """Get a user by their ID""" # Convert empty user id to None to prevent sending invalid data to server if not id: id = None - return super().get_user(id=id, **kwargs) + return self._api.get_user(id=id, **kwargs) - def upsert_user(self, upsert_user_request, id, **kwargs): + def upsert_user(self, upsert_user_request: UpsertUserRequest, id: str, **kwargs) -> Response: + """Create or update a user""" # Convert empty user id to None to prevent sending invalid data to server if not id: id = None - return super().upsert_user(id=id, body=upsert_user_request, **kwargs) + return 
self._api.upsert_user(id=id, body=upsert_user_request, **kwargs) - def delete_user(self, id, **kwargs): + def delete_user(self, id: str, **kwargs) -> Response: + """Delete a user""" # Convert empty user id to None to prevent sending invalid data to server if not id: id = None - return super().delete_user(id=id, **kwargs) + return self._api.delete_user(id=id, **kwargs) + + def list_users(self, **kwargs) -> List[ConductorUser]: + """Get all users""" + return self._api.list_users(**kwargs) diff --git a/src/conductor/client/adapters/api/version_resource_api_adapter.py b/src/conductor/client/adapters/api/version_resource_api_adapter.py index 1c9e4a204..e5adbf9d7 100644 --- a/src/conductor/client/adapters/api/version_resource_api_adapter.py +++ b/src/conductor/client/adapters/api/version_resource_api_adapter.py @@ -1,4 +1,11 @@ +from conductor.client.adapters.api_client_adapter import ApiClientAdapter from conductor.client.codegen.api.version_resource_api import VersionResourceApi -class VersionResourceApiAdapter(VersionResourceApi): ... +class VersionResourceApiAdapter: + def __init__(self, api_client: ApiClientAdapter): + self._api = VersionResourceApi(api_client) + + def get_version(self, **kwargs) -> str: + """Get the server's version""" + return self._api.get_version(**kwargs) diff --git a/src/conductor/client/adapters/api/webhooks_config_resource_api_adapter.py b/src/conductor/client/adapters/api/webhooks_config_resource_api_adapter.py index 2cf6d5c78..616967724 100644 --- a/src/conductor/client/adapters/api/webhooks_config_resource_api_adapter.py +++ b/src/conductor/client/adapters/api/webhooks_config_resource_api_adapter.py @@ -1,4 +1,43 @@ +from typing import List + +from conductor.client.adapters.api_client_adapter import ApiClientAdapter from conductor.client.codegen.api.webhooks_config_resource_api import WebhooksConfigResourceApi +from conductor.client.http.models.tag import Tag +from conductor.client.http.models.webhook_config import WebhookConfig + + +class WebhooksConfigResourceApiAdapter: + def __init__(self, api_client: ApiClientAdapter): + self._api = WebhooksConfigResourceApi(api_client) + + def create_webhook(self, body: WebhookConfig, **kwargs) -> WebhookConfig: + """Create a webhook""" + return self._api.create_webhook(body, **kwargs) + + def delete_tag_for_webhook(self, body: List[Tag], **kwargs) -> None: + """Delete a tag for webhook id""" + return self._api.delete_tag_for_webhook(body, **kwargs) + + def delete_webhook(self, id: str, **kwargs) -> None: + """Delete a webhook""" + return self._api.delete_webhook(id, **kwargs) + + def get_all_webhook(self, **kwargs) -> List[WebhookConfig]: + """Get all webhooks""" + return self._api.get_all_webhook(**kwargs) + + def get_tags_for_webhook(self, id: str, **kwargs) -> List[Tag]: + """Get tags for webhook id""" + return self._api.get_tags_for_webhook(id, **kwargs) + + def get_webhook(self, id: str, **kwargs) -> WebhookConfig: + """Get a webhook by id""" + return self._api.get_webhook(id, **kwargs) + def put_tag_for_webhook(self, body: List[Tag], id: str, **kwargs) -> None: + """Put a tag for webhook id""" + return self._api.put_tag_for_webhook(body, id, **kwargs) -class WebhooksConfigResourceApiAdapter(WebhooksConfigResourceApi): ... 
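A hedged sketch combining the new VersionResourceApiAdapter and UserResourceApiAdapter; the ConductorUser `id` attribute is assumed from the existing http models, and the client wiring is assumed as above.

from conductor.client.configuration.configuration import Configuration
from conductor.client.adapters.api_client_adapter import ApiClientAdapter
from conductor.client.adapters.api.user_resource_api_adapter import UserResourceApiAdapter
from conductor.client.adapters.api.version_resource_api_adapter import VersionResourceApiAdapter

api_client = ApiClientAdapter(Configuration(server_api_url="http://localhost:8080/api"))  # assumed wiring, as above
print(VersionResourceApiAdapter(api_client).get_version())    # server version string

user_api = UserResourceApiAdapter(api_client)
for user in user_api.list_users():                            # List[ConductorUser]
    # ConductorUser is assumed to expose an `id` attribute, as in the existing http models.
    print(user.id, user_api.get_granted_permissions(user.id))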
+ def update_webhook(self, body: WebhookConfig, id: str, **kwargs) -> WebhookConfig: + """Update a webhook""" + return self._api.update_webhook(body, id, **kwargs) diff --git a/src/conductor/client/adapters/api/workflow_bulk_resource_api_adapter.py b/src/conductor/client/adapters/api/workflow_bulk_resource_api_adapter.py index 544ad227b..5776a1878 100644 --- a/src/conductor/client/adapters/api/workflow_bulk_resource_api_adapter.py +++ b/src/conductor/client/adapters/api/workflow_bulk_resource_api_adapter.py @@ -1,4 +1,34 @@ +from typing import List + +from conductor.client.adapters.api_client_adapter import ApiClientAdapter from conductor.client.codegen.api.workflow_bulk_resource_api import WorkflowBulkResourceApi +from conductor.client.http.models.bulk_response import BulkResponse + + +class WorkflowBulkResourceApiAdapter: + def __init__(self, api_client: ApiClientAdapter): + self._api = WorkflowBulkResourceApi(api_client) + + def delete(self, body: List[str], **kwargs) -> BulkResponse: + """Permanently remove the given workflows from the system""" + return self._api.delete(body, **kwargs) + + def pause_workflow1(self, body: List[str], **kwargs) -> BulkResponse: + """Pause the given list of workflows""" + return self._api.pause_workflow1(body, **kwargs) + + def restart1(self, body: List[str], **kwargs) -> BulkResponse: + """Restart the given list of workflows""" + return self._api.restart1(body, **kwargs) + + def resume_workflow1(self, body: List[str], **kwargs) -> BulkResponse: + """Resume the given list of workflows""" + return self._api.resume_workflow1(body, **kwargs) + def retry1(self, body: List[str], **kwargs) -> BulkResponse: + """Retry the last failed task for each workflow in the given list""" + return self._api.retry1(body, **kwargs) -class WorkflowBulkResourceApiAdapter(WorkflowBulkResourceApi): ... + def terminate(self, body: List[str], **kwargs) -> BulkResponse: + """Terminate the given list of workflows""" + return self._api.terminate(body, **kwargs) diff --git a/src/conductor/client/adapters/api/workflow_resource_api_adapter.py b/src/conductor/client/adapters/api/workflow_resource_api_adapter.py index e306da766..5e573b6ac 100644 --- a/src/conductor/client/adapters/api/workflow_resource_api_adapter.py +++ b/src/conductor/client/adapters/api/workflow_resource_api_adapter.py @@ -1,4 +1,157 @@ +from typing import Dict, List + +from conductor.client.adapters.api_client_adapter import ApiClientAdapter from conductor.client.codegen.api.workflow_resource_api import WorkflowResourceApi +from conductor.client.http.models.correlation_ids_search_request import CorrelationIdsSearchRequest +from conductor.client.http.models.rerun_workflow_request import RerunWorkflowRequest +from conductor.client.http.models.scrollable_search_result_workflow_summary import ( + ScrollableSearchResultWorkflowSummary, +) +from conductor.client.http.models.skip_task_request import SkipTaskRequest +from conductor.client.http.models.start_workflow_request import StartWorkflowRequest +from conductor.client.http.models.task_list_search_result_summary import TaskListSearchResultSummary +from conductor.client.http.models.upgrade_workflow_request import UpgradeWorkflowRequest +from conductor.client.http.models.workflow import Workflow +from conductor.client.http.models.workflow_run import WorkflowRun +from conductor.client.http.models.workflow_state_update import WorkflowStateUpdate +from conductor.client.http.models.workflow_status import WorkflowStatus +from conductor.client.http.models.workflow_test_request import WorkflowTestRequest + + +class WorkflowResourceApiAdapter: + def __init__(self, api_client: ApiClientAdapter): +
self._api = WorkflowResourceApi(api_client) + + def decide(self, workflow_id: str, **kwargs) -> None: + """Starts the decision task for a workflow""" + return self._api.decide(workflow_id, **kwargs) + + def delete1(self, workflow_id: str, **kwargs) -> None: + """Removes the workflow from the system""" + return self._api.delete1(workflow_id, **kwargs) + + def execute_workflow( + self, body: StartWorkflowRequest, request_id: str, name: str, version: int, **kwargs + ) -> WorkflowRun: + """Execute a workflow synchronously""" + return self._api.execute_workflow(body, request_id, name, version, **kwargs) + + def execute_workflow_as_api( + self, body: Dict[str, object], name: str, **kwargs + ) -> Dict[str, object]: + """Execute a workflow as API""" + return self._api.execute_workflow_as_api(body, name, **kwargs) + + def execute_workflow_as_get_api(self, name: str, **kwargs) -> Dict[str, object]: + """Execute a workflow as GET API""" + return self._api.execute_workflow_as_get_api(name, **kwargs) + + def get_execution_status(self, workflow_id: str, **kwargs) -> Workflow: + """Get the execution status of a workflow""" + return self._api.get_execution_status(workflow_id, **kwargs) + + def get_execution_status_task_list( + self, workflow_id: str, **kwargs + ) -> TaskListSearchResultSummary: + """Get the execution status task list of a workflow""" + return self._api.get_execution_status_task_list(workflow_id, **kwargs) + + def get_running_workflow(self, name: str, **kwargs) -> List[str]: + """Get the running workflows""" + return self._api.get_running_workflow(name, **kwargs) + + def get_workflow_status_summary(self, workflow_id: str, **kwargs) -> WorkflowStatus: + """Get the workflow status summary""" + return self._api.get_workflow_status_summary(workflow_id, **kwargs) + + def get_workflows(self, body: List[str], name: str, **kwargs) -> Dict[str, List[Workflow]]: + """Get the workflows""" + return self._api.get_workflows(body, name, **kwargs) + + def get_workflows1( + self, body: CorrelationIdsSearchRequest, **kwargs + ) -> Dict[str, List[Workflow]]: + """Get the workflows""" + return self._api.get_workflows1(body, **kwargs) + + def get_workflows2(self, name: str, correlation_id: str, **kwargs) -> List[Workflow]: + """Lists workflows for the given correlation id""" + return self._api.get_workflows2(name, correlation_id, **kwargs) + + def jump_to_task(self, body: Dict[str, object], workflow_id: str, **kwargs) -> None: + """Jump workflow execution to given task""" + return self._api.jump_to_task(body, workflow_id, **kwargs) + + def pause_workflow(self, workflow_id: str, **kwargs) -> None: + """Pauses the workflow""" + return self._api.pause_workflow(workflow_id, **kwargs) + + def rerun(self, body: RerunWorkflowRequest, workflow_id: str, **kwargs) -> str: + """Reruns the workflow from a specific task""" + return self._api.rerun(body, workflow_id, **kwargs) + + def reset_workflow(self, workflow_id: str, **kwargs) -> None: + """Resets callback times of all non-terminal SIMPLE tasks to 0""" + return self._api.reset_workflow(workflow_id, **kwargs) + + def restart(self, workflow_id: str, **kwargs) -> None: + """Restarts a completed workflow""" + return self._api.restart(workflow_id, **kwargs) + + def resume_workflow(self, workflow_id: str, **kwargs) -> None: + """Resumes the workflow""" + return self._api.resume_workflow(workflow_id, **kwargs) + + def retry(self, workflow_id: str, **kwargs) -> None: + """Retries the last failed task""" + return self._api.retry(workflow_id, **kwargs) + + def search(self, 
**kwargs) -> ScrollableSearchResultWorkflowSummary: + """Search for workflows based on payload and other parameters""" + return self._api.search(**kwargs) + + def skip_task_from_workflow( + self, body: SkipTaskRequest, workflow_id: str, task_reference_name: str, **kwargs + ) -> None: + """Skips a given task from a current running workflow""" + return self._api.skip_task_from_workflow(body, workflow_id, task_reference_name, **kwargs) + + def start_workflow(self, body: StartWorkflowRequest, **kwargs) -> str: + """Starts a workflow""" + return self._api.start_workflow(body, **kwargs) + + def start_workflow1(self, body: Dict[str, object], name: str, **kwargs) -> str: + """Starts a workflow""" + return self._api.start_workflow1(body, name, **kwargs) + + def terminate1(self, workflow_id: str, **kwargs) -> None: + """Terminates a workflow""" + return self._api.terminate1(workflow_id, **kwargs) + + def test_workflow(self, body: WorkflowTestRequest, **kwargs) -> Workflow: + """Tests a workflow""" + return self._api.test_workflow(body, **kwargs) + + def update_workflow_and_task_state( + self, body: WorkflowStateUpdate, request_id: str, workflow_id: str, **kwargs + ) -> WorkflowRun: + """Update a workflow state by updating variables or in progress task""" + return self._api.update_workflow_and_task_state(body, request_id, workflow_id, **kwargs) + + def update_workflow_state( + self, body: Dict[str, object], workflow_id: str, **kwargs + ) -> Workflow: + """Update workflow variables""" + return self._api.update_workflow_state(body, workflow_id, **kwargs) + def upgrade_running_workflow_to_version( + self, body: UpgradeWorkflowRequest, workflow_id: str, **kwargs + ) -> None: + """Upgrade running workflow to newer version""" + return self._api.upgrade_running_workflow_to_version(body, workflow_id, **kwargs) -class WorkflowResourceApiAdapter(WorkflowResourceApi): ... 
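+    # Illustrative usage sketch, not generated code: it assumes an ApiClientAdapter
+    # built from a Configuration elsewhere; "greetings" is a hypothetical workflow
+    # name, and only commonly used StartWorkflowRequest fields are shown.
+    #
+    #   workflows = WorkflowResourceApiAdapter(api_client)
+    #   request = StartWorkflowRequest(name="greetings", version=1, input={"name": "Ada"})
+    #   workflow_id = workflows.start_workflow(request)
+    #   status = workflows.get_workflow_status_summary(workflow_id)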
+ def execute_workflow_with_return_strategy( + self, body: StartWorkflowRequest, name: str, version: int, **kwargs + ) -> WorkflowRun: + """Execute a workflow synchronously with reactive response""" + return self._api.execute_workflow_with_return_strategy(body, name, version, **kwargs) diff --git a/src/conductor/client/adapters/models/__init__.py b/src/conductor/client/adapters/models/__init__.py index 32373812b..b18bff011 100644 --- a/src/conductor/client/adapters/models/__init__.py +++ b/src/conductor/client/adapters/models/__init__.py @@ -83,8 +83,6 @@ from conductor.client.adapters.models.event_log_adapter import EventLogAdapter as EventLog from conductor.client.adapters.models.extended_conductor_application_adapter import ( ExtendedConductorApplicationAdapter as ConductorApplication, -) -from conductor.client.adapters.models.extended_conductor_application_adapter import ( ExtendedConductorApplicationAdapter as ExtendedConductorApplication, ) from conductor.client.adapters.models.extended_event_execution_adapter import ( @@ -239,15 +237,17 @@ ) from conductor.client.adapters.models.request_param_adapter import ( RequestParamAdapter as RequestParam, + SchemaAdapter as Schema, ) -from conductor.client.adapters.models.request_param_adapter import SchemaAdapter as Schema from conductor.client.adapters.models.rerun_workflow_request_adapter import ( RerunWorkflowRequestAdapter as RerunWorkflowRequest, ) from conductor.client.adapters.models.response_adapter import ResponseAdapter as Response from conductor.client.adapters.models.role_adapter import RoleAdapter as Role -from conductor.client.adapters.models.schema_def_adapter import SchemaDefAdapter as SchemaDef -from conductor.client.adapters.models.schema_def_adapter import SchemaType +from conductor.client.adapters.models.schema_def_adapter import ( + SchemaDefAdapter as SchemaDef, + SchemaType, +) from conductor.client.adapters.models.scrollable_search_result_workflow_summary_adapter import ( ScrollableSearchResultWorkflowSummaryAdapter as ScrollableSearchResultWorkflowSummary, ) @@ -257,11 +257,9 @@ from conductor.client.adapters.models.service_method_adapter import ( ServiceMethodAdapter as ServiceMethod, ) -from conductor.client.adapters.models.service_registry_adapter import ConfigAdapter as Config from conductor.client.adapters.models.service_registry_adapter import ( + ConfigAdapter as Config, OrkesCircuitBreakerConfigAdapter as OrkesCircuitBreakerConfig, -) -from conductor.client.adapters.models.service_registry_adapter import ( ServiceRegistryAdapter as ServiceRegistry, ) from conductor.client.adapters.models.signal_response_adapter import ( diff --git a/src/conductor/client/adapters/rest_adapter.py b/src/conductor/client/adapters/rest_adapter.py index a5e430a73..7eefbd95b 100644 --- a/src/conductor/client/adapters/rest_adapter.py +++ b/src/conductor/client/adapters/rest_adapter.py @@ -106,7 +106,7 @@ def __init__(self, connection: Optional[httpx.Client] = None, configuration=None client_kwargs["verify"] = ssl_context - self.connection = httpx.Client(**client_kwargs) + self.connection = httpx.Client(**client_kwargs) # type: ignore[arg-type] def close(self): """Close the HTTP client connection.""" diff --git a/src/conductor/client/ai/orchestrator.py b/src/conductor/client/ai/orchestrator.py index eaccb635a..c882b7299 100644 --- a/src/conductor/client/ai/orchestrator.py +++ b/src/conductor/client/ai/orchestrator.py @@ -9,7 +9,7 @@ if TYPE_CHECKING: from conductor.client.configuration.configuration import Configuration - from 
conductor.client.http.models import PromptTemplate + from conductor.client.http.models.message_template import MessageTemplate from conductor.shared.ai.configuration.interfaces.integration_config import IntegrationConfig from conductor.shared.ai.enums import LLMProvider, VectorDB @@ -35,9 +35,9 @@ def add_prompt_template(self, name: str, prompt_template: str, description: str) self.prompt_client.save_prompt(name, description, prompt_template) return self - def get_prompt_template(self, template_name: str) -> Optional[PromptTemplate]: + def get_prompt_template(self, template_name: str, **kwargs) -> Optional[MessageTemplate]: try: - return self.prompt_client.get_prompt(template_name) + return self.prompt_client.get_prompt(template_name, **kwargs) except ApiException as e: if e.code == NOT_FOUND_STATUS: return None diff --git a/src/conductor/client/authorization_client.py b/src/conductor/client/authorization_client.py index 9fc2f3b3d..69eabd76a 100644 --- a/src/conductor/client/authorization_client.py +++ b/src/conductor/client/authorization_client.py @@ -8,6 +8,7 @@ from conductor.client.http.models.create_or_update_application_request import ( CreateOrUpdateApplicationRequest, ) +from conductor.client.http.models.extended_conductor_application import ExtendedConductorApplication from conductor.client.http.models.group import Group from conductor.client.http.models.subject_ref import SubjectRef from conductor.client.http.models.target_ref import TargetRef @@ -33,7 +34,7 @@ def get_application(self, application_id: str) -> ConductorApplication: pass @abstractmethod - def list_applications(self) -> List[ConductorApplication]: + def list_applications(self) -> List[ExtendedConductorApplication]: pass @abstractmethod diff --git a/src/conductor/client/configuration/configuration.py b/src/conductor/client/configuration/configuration.py index 57a8b2543..119310984 100644 --- a/src/conductor/client/configuration/configuration.py +++ b/src/conductor/client/configuration/configuration.py @@ -5,8 +5,8 @@ import os import re import time -from typing import Dict, Optional, Union import warnings +from typing import Dict, Optional, Union from conductor.shared.configuration.settings.authentication_settings import ( AuthenticationSettings, @@ -144,7 +144,7 @@ def __init__( if key is not None and secret is not None: self.authentication_settings = AuthenticationSettings(key_id=key, key_secret=secret) else: - self.authentication_settings = None + self.authentication_settings = None # type: ignore[assignment] # Debug switch self.debug = debug diff --git a/src/conductor/client/event/event_client.py b/src/conductor/client/event/event_client.py index 5d345297b..a7cc57e93 100644 --- a/src/conductor/client/event/event_client.py +++ b/src/conductor/client/event/event_client.py @@ -1,9 +1,9 @@ from __future__ import annotations -from typing import List +from typing import Dict, List -from conductor.client.event.queue.queue_configuration import QueueConfiguration from conductor.client.adapters.models.tag_adapter import TagAdapter +from conductor.client.event.queue.queue_configuration import QueueConfiguration from conductor.client.http.api import EventResourceApi from conductor.client.http.api_client import ApiClient @@ -18,13 +18,13 @@ def delete_queue_configuration(self, queue_configuration: QueueConfiguration) -> queue_type=queue_configuration.queue_type, ) - def get_kafka_queue_configuration(self, queue_topic: str) -> QueueConfiguration: + def get_kafka_queue_configuration(self, queue_topic: str) -> Dict[str, object]: 
return self.get_queue_configuration( queue_type="kafka", queue_name=queue_topic, ) - def get_queue_configuration(self, queue_type: str, queue_name: str) -> QueueConfiguration: + def get_queue_configuration(self, queue_type: str, queue_name: str) -> Dict[str, object]: return self.client.get_queue_config(queue_type, queue_name) def put_queue_configuration(self, queue_configuration: QueueConfiguration): diff --git a/src/conductor/client/integration_client.py b/src/conductor/client/integration_client.py index 7995e6c2e..645b73608 100644 --- a/src/conductor/client/integration_client.py +++ b/src/conductor/client/integration_client.py @@ -7,7 +7,7 @@ from conductor.client.http.models.integration_api import IntegrationApi from conductor.client.http.models.integration_api_update import IntegrationApiUpdate from conductor.client.http.models.integration_update import IntegrationUpdate -from conductor.client.http.models.prompt_template import PromptTemplate +from conductor.client.http.models.message_template import MessageTemplate class IntegrationClient(ABC): @@ -67,7 +67,7 @@ def get_integrations(self) -> List[Integration]: @abstractmethod def get_prompts_with_integration( self, ai_integration: str, model_name: str - ) -> List[PromptTemplate]: ... + ) -> List[MessageTemplate]: ... @abstractmethod def get_token_usage_for_integration(self, name, integration_name) -> int: ... diff --git a/src/conductor/client/metadata_client.py b/src/conductor/client/metadata_client.py index 21508accb..da92692c7 100644 --- a/src/conductor/client/metadata_client.py +++ b/src/conductor/client/metadata_client.py @@ -3,6 +3,7 @@ from abc import ABC, abstractmethod from typing import List, Optional +from conductor.client.http.models.extended_workflow_def import ExtendedWorkflowDef from conductor.client.http.models.task_def import TaskDef from conductor.client.http.models.workflow_def import WorkflowDef from conductor.client.orkes.models.metadata_tag import MetadataTag @@ -10,15 +11,19 @@ class MetadataClient(ABC): @abstractmethod - def register_workflow_def(self, workflow_def: WorkflowDef, overwrite: Optional[bool]): + def register_workflow_def( + self, workflow_def: ExtendedWorkflowDef, overwrite: Optional[bool] = True + ) -> None: pass @abstractmethod - def update_workflow_def(self, workflow_def: WorkflowDef, overwrite: Optional[bool]): + def update_workflow_def( + self, workflow_def: ExtendedWorkflowDef, overwrite: Optional[bool] = True, **kwargs + ) -> None: pass @abstractmethod - def unregister_workflow_def(self, workflow_name: str, version: int): + def unregister_workflow_def(self, name: str, version: int) -> None: pass @abstractmethod diff --git a/src/conductor/client/orkes/models/access_key.py b/src/conductor/client/orkes/models/access_key.py index 318bde767..ec325ce4b 100644 --- a/src/conductor/client/orkes/models/access_key.py +++ b/src/conductor/client/orkes/models/access_key.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Optional +from typing import Any, Dict, Optional from conductor.client.orkes.models.access_key_status import AccessKeyStatus @@ -69,3 +69,11 @@ def __eq__(self, other: object) -> bool: def __ne__(self, other: object) -> bool: """Returns true if both objects are not equal""" return not self == other + + @classmethod + def from_dict(cls, obj: Dict[str, Any]) -> "AccessKey": + return cls( + id=obj["id"], + status=obj["status"], + created_at=obj["createdAt"], + ) diff --git a/src/conductor/client/orkes/models/created_access_key.py 
b/src/conductor/client/orkes/models/created_access_key.py index 3eb020dc3..1de228b27 100644 --- a/src/conductor/client/orkes/models/created_access_key.py +++ b/src/conductor/client/orkes/models/created_access_key.py @@ -1,3 +1,6 @@ +from typing import Any, Dict + + class CreatedAccessKey: def __init__(self, id: str, secret: str) -> None: self._id: str = id @@ -49,3 +52,10 @@ def __eq__(self, other: object) -> bool: def __ne__(self, other: object) -> bool: """Returns true if both objects are not equal""" return not self == other + + @classmethod + def from_dict(cls, obj: Dict[str, Any]) -> "CreatedAccessKey": + return cls( + id=obj["id"], + secret=obj["secret"], + ) diff --git a/src/conductor/client/orkes/orkes_authorization_client.py b/src/conductor/client/orkes/orkes_authorization_client.py index 2b71f5916..4d7d1ba9d 100644 --- a/src/conductor/client/orkes/orkes_authorization_client.py +++ b/src/conductor/client/orkes/orkes_authorization_client.py @@ -1,9 +1,10 @@ from __future__ import annotations -from typing import Dict, List, Optional +from typing import Any, Dict, List, Optional, cast from conductor.client.authorization_client import AuthorizationClient from conductor.client.configuration.configuration import Configuration +from conductor.client.http.models import ExtendedConductorApplication from conductor.client.http.models.authorization_request import AuthorizationRequest from conductor.client.http.models.conductor_application import ConductorApplication from conductor.client.http.models.conductor_user import ConductorUser @@ -25,159 +26,809 @@ class OrkesAuthorizationClient(OrkesBaseClient, AuthorizationClient): def __init__(self, configuration: Configuration): - super(OrkesAuthorizationClient, self).__init__(configuration) + """Initialize the OrkesAuthorizationClient with configuration. + + Args: + configuration: Configuration object containing server settings and authentication + + Example: + ```python + from conductor.client.configuration.configuration import Configuration + + config = Configuration(server_api_url="http://localhost:8080/api") + auth_client = OrkesAuthorizationClient(config) + ``` + """ + super().__init__(configuration) # Applications def create_application( - self, create_or_update_application_request: CreateOrUpdateApplicationRequest + self, create_or_update_application_request: CreateOrUpdateApplicationRequest, **kwargs ) -> ConductorApplication: - app_obj = self.applicationResourceApi.create_application( - create_or_update_application_request + """Create a new application. 
+ + Args: + create_or_update_application_request: Application details including name and owner + **kwargs: Additional optional parameters to pass to the API + + Returns: + ConductorApplication instance containing the created application details + + Example: + ```python + from conductor.client.http.models.create_or_update_application_request import CreateOrUpdateApplicationRequest + + request = CreateOrUpdateApplicationRequest( + name="My Application", + owner="engineering-team" + ) + app = auth_client.create_application(request) + print(f"Created application: {app.name} with ID: {app.id}") + ``` + """ + app_obj = self._application_api.create_application( + body=create_or_update_application_request, **kwargs ) return self.api_client.deserialize_class(app_obj, "ConductorApplication") - def get_application(self, application_id: str) -> ConductorApplication: - app_obj = self.applicationResourceApi.get_application(application_id) + def get_application(self, application_id: str, **kwargs) -> ConductorApplication: + """Get an application by ID. + + Args: + application_id: Unique identifier for the application + **kwargs: Additional optional parameters to pass to the API + + Returns: + ConductorApplication instance containing the application details + + Example: + ```python + app = auth_client.get_application("app-123") + print(f"Application: {app.name}, Owner: {app.owner}") + ``` + """ + app_obj = self._application_api.get_application(id=application_id, **kwargs) return self.api_client.deserialize_class(app_obj, "ConductorApplication") - def get_app_by_access_key_id(self, access_key_id: str) -> ConductorApplication: - app_obj = self.applicationResourceApi.get_app_by_access_key_id(access_key_id) + def get_app_by_access_key_id(self, access_key_id: str, **kwargs) -> ConductorApplication: + """Get an application by its access key ID. + + Args: + access_key_id: Unique identifier for the access key + **kwargs: Additional optional parameters to pass to the API + + Returns: + ConductorApplication instance for the application + + Example: + ```python + app = auth_client.get_app_by_access_key_id("key-123") + print(f"Application: {app.name}, Owner: {app.owner}") + ``` + """ + app_obj = self._application_api.get_app_by_access_key_id( + access_key_id=access_key_id, **kwargs + ) return self.api_client.deserialize_class(app_obj, "ConductorApplication") - def list_applications(self) -> List[ConductorApplication]: - return self.applicationResourceApi.list_applications() + def list_applications(self, **kwargs) -> List[ExtendedConductorApplication]: + """List all applications in the system. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of ExtendedConductorApplication instances representing all applications + + Example: + ```python + apps = auth_client.list_applications() + for app in apps: + print(f"Application: {app.name}, ID: {app.id}") + ``` + """ + return self._application_api.list_applications(**kwargs) def update_application( self, create_or_update_application_request: CreateOrUpdateApplicationRequest, application_id: str, + **kwargs, ) -> ConductorApplication: - app_obj = self.applicationResourceApi.update_application( - create_or_update_application_request, application_id + """Update an existing application. 
+ + Args: + create_or_update_application_request: Updated application details + application_id: Unique identifier for the application to update + **kwargs: Additional optional parameters to pass to the API + + Returns: + ConductorApplication instance containing the updated application details + + Example: + ```python + from conductor.client.http.models.create_or_update_application_request import CreateOrUpdateApplicationRequest + + request = CreateOrUpdateApplicationRequest( + name="Updated Application Name", + owner="new-owner-team" + ) + app = auth_client.update_application(request, "app-123") + print(f"Updated application: {app.name}") + ``` + """ + app_obj = self._application_api.update_application( + body=create_or_update_application_request, id=application_id, **kwargs ) return self.api_client.deserialize_class(app_obj, "ConductorApplication") - def delete_application(self, application_id: str): - self.applicationResourceApi.delete_application(application_id) + def delete_application(self, application_id: str, **kwargs) -> None: + """Delete an application by ID. + + Args: + application_id: Unique identifier for the application to delete + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + auth_client.delete_application("app-123") + ``` + """ + self._application_api.delete_application(id=application_id, **kwargs) + + def add_role_to_application_user(self, application_id: str, role: str, **kwargs) -> None: + """Add a role to an application user. + + Args: + application_id: Unique identifier for the application + role: Role name to add (e.g., "ADMIN", "USER") + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + auth_client.add_role_to_application_user("app-123", "ADMIN") + ``` + """ + self._application_api.add_role_to_application_user( + application_id=application_id, role=role, **kwargs + ) + + def remove_role_from_application_user(self, application_id: str, role: str, **kwargs) -> None: + """Remove a role from an application user. - def add_role_to_application_user(self, application_id: str, role: str): - self.applicationResourceApi.add_role_to_application_user(application_id, role) + Args: + application_id: Unique identifier for the application + role: Role name to remove + **kwargs: Additional optional parameters to pass to the API - def remove_role_from_application_user(self, application_id: str, role: str): - self.applicationResourceApi.remove_role_from_application_user(application_id, role) + Returns: + None - def set_application_tags(self, tags: List[MetadataTag], application_id: str): - self.applicationResourceApi.put_tag_for_application(tags, application_id) + Example: + ```python + auth_client.remove_role_from_application_user("app-123", "ADMIN") + ``` + """ + self._application_api.remove_role_from_application_user( + application_id=application_id, role=role, **kwargs + ) - def get_application_tags(self, application_id: str) -> List[MetadataTag]: - return self.applicationResourceApi.get_tags_for_application(application_id) + def set_application_tags(self, tags: List[MetadataTag], application_id: str, **kwargs) -> None: + """Set tags for an application, replacing any existing tags. 
+ + Args: + tags: List of tags to set for the application + application_id: Unique identifier for the application + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.client.orkes.models.metadata_tag import MetadataTag + + tags = [ + MetadataTag(key="environment", value="production"), + MetadataTag(key="team", value="platform") + ] + auth_client.set_application_tags(tags, "app-123") + ``` + """ + self._application_api.put_tag_for_application(tag=tags, id=application_id, **kwargs) + + def get_application_tags(self, application_id: str, **kwargs) -> List[MetadataTag]: + """Get all tags associated with an application. + + Args: + application_id: Unique identifier for the application + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of MetadataTag instances + + Example: + ```python + tags = auth_client.get_application_tags("app-123") + for tag in tags: + print(f"Tag: {tag.key}={tag.value}") + ``` + """ + return self._application_api.get_tags_for_application( + application_id=application_id, **kwargs + ) - def delete_application_tags(self, tags: List[MetadataTag], application_id: str): - self.applicationResourceApi.delete_tag_for_application(tags, application_id) + def delete_application_tags( + self, tags: List[MetadataTag], application_id: str, **kwargs + ) -> None: + """Delete specific tags from an application. + + Args: + tags: List of tags to delete + application_id: Unique identifier for the application + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.client.orkes.models.metadata_tag import MetadataTag + + tags_to_delete = [MetadataTag(key="environment", value="staging")] + auth_client.delete_application_tags(tags_to_delete, "app-123") + ``` + """ + self._application_api.delete_tag_for_application(tag=tags, id=application_id, **kwargs) + + def create_access_key(self, application_id: str, **kwargs) -> CreatedAccessKey: + """Create a new access key for an application. + + Args: + application_id: Unique identifier for the application + **kwargs: Additional optional parameters to pass to the API + + Returns: + CreatedAccessKey instance with the new key details (includes secret) + + Example: + ```python + key = auth_client.create_access_key("app-123") + print(f"Access Key ID: {key.id}") + print(f"Secret: {key.secret}") # Store securely, won't be shown again + ``` + """ + key_obj = self._application_api.create_access_key(id=application_id, **kwargs) + + key_obj_dict = cast(Dict[str, Any], key_obj) + result_model = CreatedAccessKey.from_dict(key_obj_dict) + + return result_model + + def get_access_keys(self, application_id: str, **kwargs) -> List[AccessKey]: + """Get all access keys for an application. 
+ + Args: + application_id: Unique identifier for the application + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of AccessKey instances (secrets are not included) + + Example: + ```python + keys = auth_client.get_access_keys("app-123") + for key in keys: + print(f"Key ID: {key.id}, Status: {key.status}") + ``` + """ + access_keys_obj = self._application_api.get_access_keys(id=application_id, **kwargs) + + access_keys_dict = cast(List[Dict[str, Any]], access_keys_obj) + result_model = [AccessKey.from_dict(_item) for _item in access_keys_dict] + + return result_model + + def toggle_access_key_status(self, application_id: str, key_id: str, **kwargs) -> AccessKey: + """Toggle the status of an access key (enable/disable). + + Args: + application_id: Unique identifier for the application + key_id: Unique identifier for the access key + **kwargs: Additional optional parameters to pass to the API + + Returns: + AccessKey instance with updated status + + Example: + ```python + # Toggle key status (enable if disabled, disable if enabled) + key = auth_client.toggle_access_key_status("app-123", "key-456") + print(f"New status: {key.status}") + ``` + """ + key_obj = self._application_api.toggle_access_key_status( + application_id=application_id, key_id=key_id, **kwargs + ) - def create_access_key(self, application_id: str) -> CreatedAccessKey: - key_obj = self.applicationResourceApi.create_access_key(application_id) - created_access_key = CreatedAccessKey(key_obj["id"], key_obj["secret"]) - return created_access_key + key_obj_dict = cast(Dict[str, Any], key_obj) + result_model = AccessKey.from_dict(key_obj_dict) - def get_access_keys(self, application_id: str) -> List[AccessKey]: - access_keys_obj = self.applicationResourceApi.get_access_keys(application_id) + return result_model - access_keys = [] - for key_obj in access_keys_obj: - access_key = AccessKey(key_obj["id"], key_obj["status"], key_obj["createdAt"]) - access_keys.append(access_key) + def delete_access_key(self, application_id: str, key_id: str, **kwargs) -> None: + """Delete an access key for an application. - return access_keys + Args: + application_id: Unique identifier for the application + key_id: Unique identifier for the access key + **kwargs: Additional optional parameters to pass to the API - def toggle_access_key_status(self, application_id: str, key_id: str) -> AccessKey: - key_obj = self.applicationResourceApi.toggle_access_key_status(application_id, key_id) - return AccessKey(key_obj["id"], key_obj["status"], key_obj["createdAt"]) + Returns: + None - def delete_access_key(self, application_id: str, key_id: str): - self.applicationResourceApi.delete_access_key(application_id, key_id) + Example: + ```python + auth_client.delete_access_key("app-123", "key-456") + ``` + """ + self._application_api.delete_access_key( + application_id=application_id, key_id=key_id, **kwargs + ) # Users - - def upsert_user(self, upsert_user_request: UpsertUserRequest, user_id: str) -> ConductorUser: - user_obj = self.userResourceApi.upsert_user(upsert_user_request, user_id) + def upsert_user( + self, upsert_user_request: UpsertUserRequest, user_id: str, **kwargs + ) -> ConductorUser: + """Create or update a user. 
+ + Args: + upsert_user_request: User details including name, email, and groups + user_id: Unique identifier for the user + **kwargs: Additional optional parameters to pass to the API + + Returns: + ConductorUser instance containing the user details + + Example: + ```python + from conductor.client.http.models.upsert_user_request import UpsertUserRequest + + request = UpsertUserRequest( + name="John Doe", + email="john.doe@example.com", + groups=["engineering", "admin"] + ) + user = auth_client.upsert_user(request, "user-123") + print(f"User: {user.name}") + ``` + """ + user_obj = self._user_api.upsert_user( + upsert_user_request=upsert_user_request, id=user_id, **kwargs + ) return self.api_client.deserialize_class(user_obj, "ConductorUser") - def get_user(self, user_id: str) -> ConductorUser: - user_obj = self.userResourceApi.get_user(user_id) + def get_user(self, user_id: str, **kwargs) -> ConductorUser: + """Get a user by ID. + + Args: + user_id: Unique identifier for the user + **kwargs: Additional optional parameters to pass to the API + + Returns: + ConductorUser instance containing the user details + + Example: + ```python + user = auth_client.get_user("user-123") + print(f"User: {user.name}, Email: {user.email}") + ``` + """ + user_obj = self._user_api.get_user(id=user_id, **kwargs) return self.api_client.deserialize_class(user_obj, "ConductorUser") - def list_users(self, apps: Optional[bool] = False) -> List[ConductorUser]: - kwargs = {"apps": apps} - return self.userResourceApi.list_users(**kwargs) + def list_users(self, apps: Optional[bool] = False, **kwargs) -> List[ConductorUser]: + """List all users in the system. - def delete_user(self, user_id: str): - self.userResourceApi.delete_user(user_id) + Args: + apps: If True, include application users + **kwargs: Additional optional parameters to pass to the API - # Groups + Returns: + List of ConductorUser instances - def upsert_group(self, upsert_group_request: UpsertGroupRequest, group_id: str) -> Group: - group_obj = self.groupResourceApi.upsert_group(upsert_group_request, group_id) - return self.api_client.deserialize_class(group_obj, "Group") + Example: + ```python + users = auth_client.list_users() + for user in users: + print(f"User: {user.name}, Email: {user.email}") + ``` + """ + kwargs["apps"] = apps + return self._user_api.list_users(**kwargs) + + def delete_user(self, user_id: str, **kwargs) -> None: + """Delete a user by ID. + + Args: + user_id: Unique identifier for the user + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + auth_client.delete_user("user-123") + ``` + """ + self._user_api.delete_user(id=user_id, **kwargs) - def get_group(self, group_id: str) -> Group: - group_obj = self.groupResourceApi.get_group(group_id) + # Groups + def upsert_group( + self, upsert_group_request: UpsertGroupRequest, group_id: str, **kwargs + ) -> Group: + """Create or update a group. 
+ + Args: + upsert_group_request: Group details including name and description + group_id: Unique identifier for the group + **kwargs: Additional optional parameters to pass to the API + + Returns: + Group instance containing the group details + + Example: + ```python + from conductor.client.http.models.upsert_group_request import UpsertGroupRequest + + request = UpsertGroupRequest( + name="Engineering Team", + description="All engineering team members" + ) + group = auth_client.upsert_group(request, "engineering") + print(f"Group: {group.name}") + ``` + """ + group_obj = self._group_api.upsert_group(body=upsert_group_request, id=group_id, **kwargs) return self.api_client.deserialize_class(group_obj, "Group") - def list_groups(self) -> List[Group]: - return self.groupResourceApi.list_groups() + def get_group(self, group_id: str, **kwargs) -> Group: + """Get a group by ID. + + Args: + group_id: Unique identifier for the group + **kwargs: Additional optional parameters to pass to the API - def delete_group(self, group_id: str): - self.groupResourceApi.delete_group(group_id) + Returns: + Group instance containing the group details - def add_user_to_group(self, group_id: str, user_id: str): - self.groupResourceApi.add_user_to_group(group_id, user_id) + Example: + ```python + group = auth_client.get_group("engineering") + print(f"Group: {group.name}, Members: {len(group.members)}") + ``` + """ + group_obj = self._group_api.get_group(id=group_id, **kwargs) + return self.api_client.deserialize_class(group_obj, "Group") - def get_users_in_group(self, group_id: str) -> List[ConductorUser]: - user_objs = self.groupResourceApi.get_users_in_group(group_id) - group_users = [] + def list_groups(self, **kwargs) -> List[Group]: + """List all groups in the system. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of Group instances + + Example: + ```python + groups = auth_client.list_groups() + for group in groups: + print(f"Group: {group.name}, Members: {len(group.members)}") + ``` + """ + return self._group_api.list_groups(**kwargs) + + def delete_group(self, group_id: str, **kwargs) -> None: + """Delete a group by ID. + + Args: + group_id: Unique identifier for the group + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + auth_client.delete_group("old-group") + ``` + """ + self._group_api.delete_group(id=group_id, **kwargs) + + def add_user_to_group(self, group_id: str, user_id: str, **kwargs) -> None: + """Add a user to a group. + + Args: + group_id: Unique identifier for the group + user_id: Unique identifier for the user + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + auth_client.add_user_to_group("engineering", "user-123") + ``` + """ + self._group_api.add_user_to_group(group_id=group_id, user_id=user_id, **kwargs) + + def get_users_in_group(self, group_id: str, **kwargs) -> List[ConductorUser]: + """Get all users in a group. 
+ + Args: + group_id: Unique identifier for the group + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of ConductorUser instances + + Example: + ```python + users = auth_client.get_users_in_group("engineering") + for user in users: + print(f"User: {user.name}, Email: {user.email}") + ``` + """ + user_objs = self._group_api.get_users_in_group(id=group_id, **kwargs) + group_users: List[ConductorUser] = [] for u in user_objs: c_user = self.api_client.deserialize_class(u, "ConductorUser") group_users.append(c_user) return group_users - def remove_user_from_group(self, group_id: str, user_id: str): - self.groupResourceApi.remove_user_from_group(group_id, user_id) + def remove_user_from_group(self, group_id: str, user_id: str, **kwargs) -> None: + """Remove a user from a group. + + Args: + group_id: Unique identifier for the group + user_id: Unique identifier for the user + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + auth_client.remove_user_from_group("engineering", "user-123") + ``` + """ + self._group_api.remove_user_from_group(group_id=group_id, user_id=user_id, **kwargs) # Permissions - def grant_permissions(self, subject: SubjectRef, target: TargetRef, access: List[AccessType]): + def grant_permissions( + self, subject: SubjectRef, target: TargetRef, access: List[AccessType] + ) -> None: + """Grant permissions to a subject on a target resource. + + Args: + subject: Subject (user, group, or application) to grant permissions to + target: Target resource (workflow, task, etc.) + access: List of access types (READ, EXECUTE, UPDATE, DELETE, etc.) + + Returns: + None + + Example: + ```python + from conductor.client.http.models.subject_ref import SubjectRef + from conductor.client.http.models.target_ref import TargetRef + from conductor.client.orkes.models.access_type import AccessType + + subject = SubjectRef(id="user-123", type="USER") + target = TargetRef(id="workflow-456", type="WORKFLOW_DEF") + access = [AccessType.READ, AccessType.EXECUTE] + + auth_client.grant_permissions(subject, target, access) + ``` + """ req = AuthorizationRequest(subject, target, access) - self.authorizationResourceApi.grant_permissions(req) + self._authorization_api.grant_permissions(body=req) + + def get_permissions(self, target: TargetRef, **kwargs) -> Dict[str, List[SubjectRef]]: + """Get all permissions for a target resource. 
+ + Args: + target: Target resource to get permissions for + **kwargs: Additional optional parameters to pass to the API + + Returns: + Dictionary mapping access types to lists of subjects with that access + + Example: + ```python + from conductor.client.http.models.target_ref import TargetRef + + target = TargetRef(id="workflow-456", type="WORKFLOW_DEF") + permissions = auth_client.get_permissions(target) + + for access_type, subjects in permissions.items(): + print(f"{access_type}: {len(subjects)} subjects") + ``` + """ + resp_obj = self._authorization_api.get_permissions( + type=target.type.name, id=target.id, **kwargs + ) + permissions: Dict[str, List[SubjectRef]] = {} - def get_permissions(self, target: TargetRef) -> Dict[str, List[SubjectRef]]: - resp_obj = self.authorizationResourceApi.get_permissions(target.type.name, target.id) - permissions = {} for access_type, subjects in resp_obj.items(): subject_list = [SubjectRef(sub["id"], sub["type"]) for sub in subjects] permissions[access_type] = subject_list + return permissions - def get_granted_permissions_for_group(self, group_id: str) -> List[GrantedPermission]: - granted_access_obj = self.groupResourceApi.get_granted_permissions1(group_id) - granted_permissions = [] + def get_granted_permissions_for_group(self, group_id: str, **kwargs) -> List[GrantedPermission]: + """Get all permissions granted to a group. + + Args: + group_id: Unique identifier for the group + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of GrantedPermission instances + + Example: + ```python + permissions = auth_client.get_granted_permissions_for_group("engineering") + for perm in permissions: + print(f"Target: {perm.target.id}, Access: {perm.access}") + ``` + """ + granted_access_obj = self._group_api.get_granted_permissions1(group_id=group_id, **kwargs) + granted_permissions: List[GrantedPermission] = [] for ga in granted_access_obj.granted_access: target = TargetRef(ga.target.id, ga.target.type) access = ga.access granted_permissions.append(GrantedPermission(target, access)) + return granted_permissions - def get_granted_permissions_for_user(self, user_id: str) -> List[GrantedPermission]: - granted_access_obj = self.userResourceApi.get_granted_permissions(user_id) + def get_granted_permissions_for_user(self, user_id: str, **kwargs) -> List[GrantedPermission]: + """Get all permissions granted to a user. + + Args: + user_id: Unique identifier for the user + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of GrantedPermission instances + + Example: + ```python + permissions = auth_client.get_granted_permissions_for_user("user-123") + for perm in permissions: + print(f"Target: {perm.target.id}, Access: {perm.access}") + ``` + """ + granted_access_obj = self._user_api.get_granted_permissions(user_id=user_id, **kwargs) + + granted_access_obj_dict = cast(Dict[str, Any], granted_access_obj) + granted_permissions = [] - for ga in granted_access_obj["grantedAccess"]: + for ga in granted_access_obj_dict["grantedAccess"]: target = TargetRef(ga["target"]["id"], ga["target"]["type"]) access = ga["access"] granted_permissions.append(GrantedPermission(target, access)) return granted_permissions - def remove_permissions(self, subject: SubjectRef, target: TargetRef, access: List[AccessType]): + def remove_permissions( + self, subject: SubjectRef, target: TargetRef, access: List[AccessType], **kwargs + ) -> None: + """Remove permissions from a subject on a target resource. 
+ + Args: + subject: Subject (user, group, or application) to remove permissions from + target: Target resource (workflow, task, etc.) + access: List of access types to remove + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.client.http.models.subject_ref import SubjectRef + from conductor.client.http.models.target_ref import TargetRef + from conductor.client.orkes.models.access_type import AccessType + + subject = SubjectRef(id="user-123", type="USER") + target = TargetRef(id="workflow-456", type="WORKFLOW_DEF") + access = [AccessType.DELETE] + + auth_client.remove_permissions(subject, target, access) + ``` + """ req = AuthorizationRequest(subject, target, access) - self.authorizationResourceApi.remove_permissions(req) + self._authorization_api.remove_permissions(body=req, **kwargs) + + def add_users_to_group(self, body: List[str], group_id: str, **kwargs) -> None: + """Add multiple users to a group. + + Args: + body: List of user IDs to add to the group + group_id: Unique identifier for the group + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + user_ids = ["user-123", "user-456", "user-789"] + auth_client.add_users_to_group(user_ids, "engineering") + ``` + """ + self._group_api.add_users_to_group(body=body, group_id=group_id, **kwargs) + + def remove_users_from_group(self, body: List[str], group_id: str, **kwargs) -> None: + """Remove multiple users from a group. + + Args: + body: List of user IDs to remove from the group + group_id: Unique identifier for the group + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + user_ids = ["user-123", "user-456"] + auth_client.remove_users_from_group(user_ids, "engineering") + ``` + """ + self._group_api.remove_users_from_group(body=body, group_id=group_id, **kwargs) + + def check_permissions(self, user_id: str, type: str, id: str, **kwargs) -> Dict[str, bool]: + """Check a user's permissions on a specific resource. 
+ + Args: + user_id: Unique identifier for the user + type: Resource type (e.g., "WORKFLOW_DEF", "TASK_DEF") + id: Resource identifier + **kwargs: Additional optional parameters to pass to the API + + Returns: + Dictionary mapping access types to boolean values indicating if user has that permission + + Example: + ```python + permissions = auth_client.check_permissions( + "user-123", + "WORKFLOW_DEF", + "workflow-456" + ) + + if permissions.get("EXECUTE"): + print("User can execute the workflow") + if permissions.get("UPDATE"): + print("User can update the workflow") + ``` + """ + result = self._user_api.check_permissions(user_id=user_id, type=type, id=id, **kwargs) + + result_dict = cast(Dict[str, Any], result) + result_model = {k: v for k, v in result_dict.items() if isinstance(v, bool)} + + return result_model diff --git a/src/conductor/client/orkes/orkes_base_client.py b/src/conductor/client/orkes/orkes_base_client.py index 08adbc49a..4908ae71e 100644 --- a/src/conductor/client/orkes/orkes_base_client.py +++ b/src/conductor/client/orkes/orkes_base_client.py @@ -1,8 +1,13 @@ import logging +import warnings + +from deprecated import deprecated +from typing_extensions import deprecated as typing_deprecated from conductor.client.configuration.configuration import Configuration from conductor.client.http.api.application_resource_api import ApplicationResourceApi from conductor.client.http.api.authorization_resource_api import AuthorizationResourceApi +from conductor.client.http.api.event_resource_api import EventResourceApi from conductor.client.http.api.group_resource_api import GroupResourceApi from conductor.client.http.api.integration_resource_api import IntegrationResourceApi from conductor.client.http.api.metadata_resource_api import MetadataResourceApi @@ -16,25 +21,355 @@ from conductor.client.http.api.user_resource_api import UserResourceApi from conductor.client.http.api.workflow_resource_api import WorkflowResourceApi from conductor.client.http.api_client import ApiClient -from conductor.client.http.api.event_resource_api import EventResourceApi class OrkesBaseClient(object): def __init__(self, configuration: Configuration): self.api_client = ApiClient(configuration) self.logger = logging.getLogger(Configuration.get_logging_formatted_name(__name__)) - self.metadataResourceApi = MetadataResourceApi(self.api_client) - self.taskResourceApi = TaskResourceApi(self.api_client) - self.workflowResourceApi = WorkflowResourceApi(self.api_client) - self.applicationResourceApi = ApplicationResourceApi(self.api_client) - self.secretResourceApi = SecretResourceApi(self.api_client) - self.userResourceApi = UserResourceApi(self.api_client) - self.groupResourceApi = GroupResourceApi(self.api_client) - self.authorizationResourceApi = AuthorizationResourceApi(self.api_client) - self.schedulerResourceApi = SchedulerResourceApi(self.api_client) - self.tagsApi = TagsApi(self.api_client) - self.integrationApi = IntegrationResourceApi(self.api_client) - self.promptApi = PromptResourceApi(self.api_client) - self.schemaApi = SchemaResourceApi(self.api_client) - self.serviceRegistryResourceApi = ServiceRegistryResourceApi(self.api_client) - self.eventResourceApi = EventResourceApi(self.api_client) + + self._metadata_api = MetadataResourceApi(self.api_client) + self._task_api = TaskResourceApi(self.api_client) + self._workflow_api = WorkflowResourceApi(self.api_client) + self._application_api = ApplicationResourceApi(self.api_client) + self._secret_api = SecretResourceApi(self.api_client) + self._user_api = 
UserResourceApi(self.api_client) + self._group_api = GroupResourceApi(self.api_client) + self._authorization_api = AuthorizationResourceApi(self.api_client) + self._scheduler_api = SchedulerResourceApi(self.api_client) + self._tags_api = TagsApi(self.api_client) + self._integration_api = IntegrationResourceApi(self.api_client) + self._prompt_api = PromptResourceApi(self.api_client) + self._schema_api = SchemaResourceApi(self.api_client) + self._service_registry_api = ServiceRegistryResourceApi(self.api_client) + self._event_api = EventResourceApi(self.api_client) + + @property + @typing_deprecated( + "metadataResourceApi is deprecated; use OrkesMetadataClient instead. " + "This attribute will be removed in a future version." + ) + @deprecated( + "metadataResourceApi is deprecated; use OrkesMetadataClient instead. " + "This attribute will be removed in a future version." + ) + def metadataResourceApi(self) -> MetadataResourceApi: + """ + Deprecated: attribute-style access maintained for backward compatibility. + Prefer using `OrkesMetadataClient` methods instead. + """ + warnings.warn( + "'metadataResourceApi' is deprecated and will be removed in a future release. " + "Use `OrkesMetadataClient` instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._metadata_api + + @property + @typing_deprecated( + "taskResourceApi is deprecated; use OrkesTaskClient instead. " + "This attribute will be removed in a future version." + ) + @deprecated( + "taskResourceApi is deprecated; use OrkesTaskClient instead. " + "This attribute will be removed in a future version." + ) + def taskResourceApi(self) -> TaskResourceApi: + """ + Deprecated: attribute-style access maintained for backward compatibility. + Prefer using `OrkesTaskClient` methods instead. + """ + warnings.warn( + "'taskResourceApi' is deprecated and will be removed in a future release. " + "Use `OrkesTaskClient` instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._task_api + + @property + @typing_deprecated( + "workflowResourceApi is deprecated; use OrkesWorkflowClient instead. " + "This attribute will be removed in a future version." + ) + @deprecated( + "workflowResourceApi is deprecated; use OrkesWorkflowClient instead. " + "This attribute will be removed in a future version." + ) + def workflowResourceApi(self) -> WorkflowResourceApi: + """ + Deprecated: attribute-style access maintained for backward compatibility. + Prefer using `OrkesWorkflowClient` methods instead. + """ + warnings.warn( + "'workflowResourceApi' is deprecated and will be removed in a future release. " + "Use `OrkesWorkflowClient` instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._workflow_api + + @property + @typing_deprecated( + "applicationResourceApi is deprecated; use OrkesAuthorizationClient instead. " + "This attribute will be removed in a future version." + ) + @deprecated( + "applicationResourceApi is deprecated; use OrkesAuthorizationClient instead. " + "This attribute will be removed in a future version." + ) + def applicationResourceApi(self) -> ApplicationResourceApi: + """ + Deprecated: attribute-style access maintained for backward compatibility. + Prefer using `OrkesAuthorizationClient` methods instead. + """ + warnings.warn( + "'applicationResourceApi' is deprecated and will be removed in a future release. 
" + "Use `OrkesAuthorizationClient` instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._application_api + + @property + @typing_deprecated( + "secretResourceApi is deprecated; use OrkesSecretClient instead. " + "This attribute will be removed in a future version." + ) + @deprecated( + "secretResourceApi is deprecated; use OrkesSecretClient instead. " + "This attribute will be removed in a future version." + ) + def secretResourceApi(self) -> SecretResourceApi: + """ + Deprecated: attribute-style access maintained for backward compatibility. + Prefer using `OrkesSecretClient` methods instead. + """ + warnings.warn( + "'secretResourceApi' is deprecated and will be removed in a future release. " + "Use `OrkesSecretClient` instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._secret_api + + @property + @typing_deprecated( + "userResourceApi is deprecated; use OrkesAuthorizationClient instead. " + "This attribute will be removed in a future version." + ) + @deprecated( + "userResourceApi is deprecated; use OrkesAuthorizationClient instead. " + "This attribute will be removed in a future version." + ) + def userResourceApi(self) -> UserResourceApi: + """ + Deprecated: attribute-style access maintained for backward compatibility. + Prefer using `OrkesAuthorizationClient` methods instead. + """ + warnings.warn( + "'userResourceApi' is deprecated and will be removed in a future release. " + "Use `OrkesAuthorizationClient` instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._user_api + + @property + @typing_deprecated( + "groupResourceApi is deprecated; use OrkesAuthorizationClient instead. " + "This attribute will be removed in a future version." + ) + @deprecated( + "groupResourceApi is deprecated; use OrkesAuthorizationClient instead. " + "This attribute will be removed in a future version." + ) + def groupResourceApi(self) -> GroupResourceApi: + """ + Deprecated: attribute-style access maintained for backward compatibility. + Prefer using `OrkesAuthorizationClient` methods instead. + """ + warnings.warn( + "'groupResourceApi' is deprecated and will be removed in a future release. " + "Use `OrkesAuthorizationClient` instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._group_api + + @property + @typing_deprecated( + "authorizationResourceApi is deprecated; use OrkesAuthorizationClient instead. " + "This attribute will be removed in a future version." + ) + @deprecated( + "authorizationResourceApi is deprecated; use OrkesAuthorizationClient instead. " + "This attribute will be removed in a future version." + ) + def authorizationResourceApi(self) -> AuthorizationResourceApi: + """ + Deprecated: attribute-style access maintained for backward compatibility. + Prefer using `OrkesAuthorizationClient` methods instead. + """ + warnings.warn( + "'authorizationResourceApi' is deprecated and will be removed in a future release. " + "Use `OrkesAuthorizationClient` instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._authorization_api + + @property + @typing_deprecated( + "schedulerResourceApi is deprecated; use OrkesSchedulerClient instead. " + "This attribute will be removed in a future version." + ) + @deprecated( + "schedulerResourceApi is deprecated; use OrkesSchedulerClient instead. " + "This attribute will be removed in a future version." + ) + def schedulerResourceApi(self) -> SchedulerResourceApi: + """ + Deprecated: attribute-style access maintained for backward compatibility. 
+ Prefer using `OrkesSchedulerClient` methods instead. + """ + warnings.warn( + "'schedulerResourceApi' is deprecated and will be removed in a future release. " + "Use `OrkesSchedulerClient` instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._scheduler_api + + @property + @typing_deprecated( + "tagsApi is deprecated; use OrkesTagsClient instead. " + "This attribute will be removed in a future version." + ) + @deprecated( + "tagsApi is deprecated; use OrkesTagsClient instead. " + "This attribute will be removed in a future version." + ) + def tagsApi(self) -> TagsApi: + """ + Deprecated: attribute-style access maintained for backward compatibility. + Prefer using `OrkesTagsClient` methods instead. + """ + warnings.warn( + "'tagsApi' is deprecated and will be removed in a future release. " + "Use `OrkesTagsClient` instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._tags_api + + @property + @typing_deprecated( + "integrationApi is deprecated; use OrkesIntegrationClient instead. " + "This attribute will be removed in a future version." + ) + @deprecated( + "integrationApi is deprecated; use OrkesIntegrationClient instead. " + "This attribute will be removed in a future version." + ) + def integrationApi(self) -> IntegrationResourceApi: + """ + Deprecated: attribute-style access maintained for backward compatibility. + Prefer using `OrkesIntegrationClient` methods instead. + """ + warnings.warn( + "'integrationApi' is deprecated and will be removed in a future release. " + "Use `OrkesIntegrationClient` instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._integration_api + + @property + @typing_deprecated( + "promptApi is deprecated; use OrkesPromptClient instead. " + "This attribute will be removed in a future version." + ) + @deprecated( + "promptApi is deprecated; use OrkesPromptClient instead. " + "This attribute will be removed in a future version." + ) + def promptApi(self) -> PromptResourceApi: + """ + Deprecated: attribute-style access maintained for backward compatibility. + Prefer using `OrkesPromptClient` methods instead. + """ + warnings.warn( + "'promptApi' is deprecated and will be removed in a future release. " + "Use `OrkesPromptClient` instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._prompt_api + + @property + @typing_deprecated( + "schemaApi is deprecated; use OrkesSchemaClient instead. " + "This attribute will be removed in a future version." + ) + @deprecated( + "schemaApi is deprecated; use OrkesSchemaClient instead. " + "This attribute will be removed in a future version." + ) + def schemaApi(self) -> SchemaResourceApi: + """ + Deprecated: attribute-style access maintained for backward compatibility. + Prefer using `OrkesSchemaClient` methods instead. + """ + warnings.warn( + "'schemaApi' is deprecated and will be removed in a future release. " + "Use `OrkesSchemaClient` instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._schema_api + + @property + @typing_deprecated( + "serviceRegistryResourceApi is deprecated; use OrkesServiceRegistryClient instead. " + "This attribute will be removed in a future version." + ) + @deprecated( + "serviceRegistryResourceApi is deprecated; use OrkesServiceRegistryClient instead. " + "This attribute will be removed in a future version." + ) + def serviceRegistryResourceApi(self) -> ServiceRegistryResourceApi: + """ + Deprecated: attribute-style access maintained for backward compatibility. + Prefer using `OrkesServiceRegistryClient` methods instead. 
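        Illustrative only (the owning client instance is referred to here as `client`, a stand-in name):
        attribute access still works during the migration, but it now emits a `DeprecationWarning` that can
        be surfaced explicitly:

        ```python
        import warnings

        # Existing code keeps working, but now raises a DeprecationWarning on access.
        with warnings.catch_warnings(record=True) as caught:
            warnings.simplefilter("always")
            raw_api = client.serviceRegistryResourceApi  # `client` is a placeholder instance
            assert any(issubclass(w.category, DeprecationWarning) for w in caught)
        ```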
+ """ + warnings.warn( + "'serviceRegistryResourceApi' is deprecated and will be removed in a future release. " + "Use `OrkesServiceRegistryClient` instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._service_registry_api + + @property + @typing_deprecated( + "eventResourceApi is deprecated; use OrkesEventClient instead. " + "This attribute will be removed in a future version." + ) + @deprecated( + "eventResourceApi is deprecated; use OrkesEventClient instead. " + "This attribute will be removed in a future version." + ) + def eventResourceApi(self) -> EventResourceApi: + """ + Deprecated: attribute-style access maintained for backward compatibility. + Prefer using `OrkesEventClient` methods instead. + """ + warnings.warn( + "'eventResourceApi' is deprecated and will be removed in a future release. " + "Use `OrkesEventClient` instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._event_api diff --git a/src/conductor/client/orkes/orkes_event_client.py b/src/conductor/client/orkes/orkes_event_client.py index c4efa370a..9521b3319 100644 --- a/src/conductor/client/orkes/orkes_event_client.py +++ b/src/conductor/client/orkes/orkes_event_client.py @@ -1,19 +1,16 @@ from __future__ import annotations -from typing import List +from typing import Dict, List from conductor.client.adapters.models.event_handler_adapter import EventHandlerAdapter from conductor.client.adapters.models.tag_adapter import TagAdapter +from conductor.client.http.models.connectivity_test_input import ConnectivityTestInput +from conductor.client.http.models.connectivity_test_result import ConnectivityTestResult +from conductor.client.http.models.event_handler import EventHandler from conductor.client.orkes.orkes_base_client import OrkesBaseClient class OrkesEventClient(OrkesBaseClient): - """Event management client for Orkes Conductor platform. - - Provides comprehensive event handling capabilities including event handler - management, tag operations, queue configuration, and event execution monitoring. - """ - # Event Handler Operations def create_event_handler(self, event_handler: List[EventHandlerAdapter]) -> None: """Create a new event handler. @@ -21,62 +18,57 @@ def create_event_handler(self, event_handler: List[EventHandlerAdapter]) -> None Creates one or more event handlers that will be triggered by specific events. Event handlers define what actions to take when certain events occur in the system. 
- Parameters: - ----------- - event_handler : List[EventHandlerAdapter] - List of event handler configurations to create + Args: + event_handler: List of event handler configurations to create - Example: - -------- - ```python - from conductor.client.adapters.models.event_handler_adapter import EventHandlerAdapter - from conductor.client.adapters.models.action_adapter import ActionAdapter - - # Create an event handler - event_handler = EventHandlerAdapter( - name="workflow_trigger", - event="workflow.completed", - active=True, - condition="payload.status == 'COMPLETED'", - actions=[ - ActionAdapter( - action="start_workflow", - workflow_id="notification_workflow", - input_parameters={"message": "Workflow completed successfully"} - ) - ] - ) + Returns: + None - event_client.create_event_handler([event_handler]) - ``` + Example: + ```python + from conductor.client.adapters.models.event_handler_adapter import EventHandlerAdapter + from conductor.client.adapters.models.action_adapter import ActionAdapter + + # Create an event handler + event_handler = EventHandlerAdapter( + name="workflow_trigger", + event="workflow.completed", + active=True, + condition="payload.status == 'COMPLETED'", + actions=[ + ActionAdapter( + action="start_workflow", + workflow_id="notification_workflow", + input_parameters={"message": "Workflow completed successfully"} + ) + ] + ) + + event_client.create_event_handler([event_handler]) + ``` """ - return self.eventResourceApi.add_event_handler(event_handler) + return self._event_api.add_event_handler(event_handler) def get_event_handler(self, name: str) -> EventHandlerAdapter: """Get event handler by name. Retrieves a specific event handler configuration by its name. - Parameters: - ----------- - name : str - The name of the event handler to retrieve + Args: + name: The name of the event handler to retrieve Returns: - -------- - EventHandlerAdapter - The event handler configuration + EventHandlerAdapter instance with the event handler configuration Example: - -------- - ```python - # Get a specific event handler - handler = event_client.get_event_handler("workflow_trigger") - print(f"Handler event: {handler.event}") - print(f"Handler active: {handler.active}") - ``` + ```python + # Get a specific event handler + handler = event_client.get_event_handler("workflow_trigger") + print(f"Handler event: {handler.event}") + print(f"Handler active: {handler.active}") + ``` """ - return self.eventResourceApi.get_event_handler_by_name(name=name) + return self._event_api.get_event_handler_by_name(name=name) def list_event_handlers(self) -> List[EventHandlerAdapter]: """List all event handlers. @@ -84,48 +76,40 @@ def list_event_handlers(self) -> List[EventHandlerAdapter]: Retrieves all event handlers configured in the system. 
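        A small illustrative variation on the example below, narrowing the result to active handlers only:

        ```python
        handlers = event_client.list_event_handlers()
        active_handlers = [h for h in handlers if h.active]
        print(f"{len(active_handlers)} of {len(handlers)} handlers are active")
        ```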
Returns: - -------- - List[EventHandlerAdapter] - List of all event handler configurations + List of EventHandlerAdapter instances Example: - -------- - ```python - # List all event handlers - handlers = event_client.list_event_handlers() - for handler in handlers: - print(f"Handler: {handler.name}, Event: {handler.event}, Active: {handler.active}") - ``` + ```python + # List all event handlers + handlers = event_client.list_event_handlers() + for handler in handlers: + print(f"Handler: {handler.name}, Event: {handler.event}, Active: {handler.active}") + ``` """ - return self.eventResourceApi.get_event_handlers() + return self._event_api.get_event_handlers() def list_event_handlers_for_event(self, event: str) -> List[EventHandlerAdapter]: """List event handlers for a specific event. Retrieves all event handlers that are configured to respond to a specific event type. - Parameters: - ----------- - event : str - The event type to filter handlers by (e.g., "workflow.completed", "task.failed") + Args: + event: The event type to filter handlers by (e.g., "workflow.completed", "task.failed") Returns: - -------- - List[EventHandlerAdapter] - List of event handlers that respond to the specified event + List of EventHandlerAdapter instances that respond to the specified event Example: - -------- - ```python - # Get handlers for workflow completion events - handlers = event_client.list_event_handlers_for_event("workflow.completed") - print(f"Found {len(handlers)} handlers for workflow.completed events") - - # Get handlers for task failure events - failure_handlers = event_client.list_event_handlers_for_event("task.failed") - ``` + ```python + # Get handlers for workflow completion events + handlers = event_client.list_event_handlers_for_event("workflow.completed") + print(f"Found {len(handlers)} handlers for workflow.completed events") + + # Get handlers for task failure events + failure_handlers = event_client.list_event_handlers_for_event("task.failed") + ``` """ - return self.eventResourceApi.get_event_handlers_for_event(event=event) + return self._event_api.get_event_handlers_for_event(event=event) def update_event_handler(self, event_handler: EventHandlerAdapter) -> None: """Update an existing event handler. @@ -133,43 +117,43 @@ def update_event_handler(self, event_handler: EventHandlerAdapter) -> None: Updates the configuration of an existing event handler. The handler is identified by its name field. - Parameters: - ----------- - event_handler : EventHandlerAdapter - Event handler configuration to update + Args: + event_handler: Event handler configuration to update + + Returns: + None Example: - -------- - ```python - # Update an existing event handler - handler = event_client.get_event_handler("workflow_trigger") - handler.active = False # Disable the handler - handler.condition = "payload.status == 'COMPLETED' AND payload.priority == 'HIGH'" - - event_client.update_event_handler(handler) - ``` + ```python + # Update an existing event handler + handler = event_client.get_event_handler("workflow_trigger") + handler.active = False # Disable the handler + handler.condition = "payload.status == 'COMPLETED' AND payload.priority == 'HIGH'" + + event_client.update_event_handler(handler) + ``` """ - return self.eventResourceApi.update_event_handler(event_handler) + return self._event_api.update_event_handler(event_handler) def delete_event_handler(self, name: str) -> None: """Delete an event handler by name. Permanently removes an event handler from the system. 
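        For instance, a bulk cleanup can combine this with `list_event_handlers_for_event` (illustrative sketch):

        ```python
        # Remove every handler registered for a given event type
        for handler in event_client.list_event_handlers_for_event("task.failed"):
            event_client.delete_event_handler(handler.name)
        ```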
- Parameters: - ----------- - name : str - The name of the event handler to delete + Args: + name: The name of the event handler to delete + + Returns: + None Example: - -------- - ```python - # Delete an event handler - event_client.delete_event_handler("old_workflow_trigger") - print("Event handler deleted successfully") - ``` + ```python + # Delete an event handler + event_client.delete_event_handler("old_workflow_trigger") + print("Event handler deleted successfully") + ``` """ - return self.eventResourceApi.remove_event_handler_status(name=name) + return self._event_api.remove_event_handler_status(name=name) # Event Handler Tag Operations def get_event_handler_tags(self, name: str) -> List[TagAdapter]: @@ -178,87 +162,80 @@ def get_event_handler_tags(self, name: str) -> List[TagAdapter]: Retrieves all tags associated with a specific event handler. Tags are used for organizing and categorizing event handlers. - Parameters: - ----------- - name : str - The name of the event handler + Args: + name: The name of the event handler Returns: - -------- - List[TagAdapter] - List of tags associated with the event handler + List of TagAdapter instances Example: - -------- - ```python - # Get tags for an event handler - tags = event_client.get_event_handler_tags("workflow_trigger") - for tag in tags: - print(f"Tag: {tag.key} = {tag.value}") - ``` + ```python + # Get tags for an event handler + tags = event_client.get_event_handler_tags("workflow_trigger") + for tag in tags: + print(f"Tag: {tag.key} = {tag.value}") + ``` """ - return self.eventResourceApi.get_tags_for_event_handler(name=name) + return self._event_api.get_tags_for_event_handler(name=name) def add_event_handler_tag(self, name: str, tags: List[TagAdapter]) -> None: """Add tags to an event handler. Associates one or more tags with an event handler for organization and categorization. - Parameters: - ----------- - name : str - The name of the event handler - tags : List[TagAdapter] - List of tags to add to the event handler + Args: + name: The name of the event handler + tags: List of tags to add to the event handler + + Returns: + None Example: - -------- - ```python - from conductor.client.adapters.models.tag_adapter import TagAdapter - - # Add tags to an event handler - tags = [ - TagAdapter(key="environment", value="production"), - TagAdapter(key="team", value="platform"), - TagAdapter(key="priority", value="high") - ] - - event_client.add_event_handler_tag("workflow_trigger", tags) - ``` + ```python + from conductor.client.adapters.models.tag_adapter import TagAdapter + + # Add tags to an event handler + tags = [ + TagAdapter(key="environment", value="production"), + TagAdapter(key="team", value="platform"), + TagAdapter(key="priority", value="high") + ] + + event_client.add_event_handler_tag("workflow_trigger", tags) + ``` """ # Note: Sync API uses (tags, name) parameter order due to swagger-codegen placing # body params before path params. Async API uses (name=name, tag=tags) instead. - return self.eventResourceApi.put_tag_for_event_handler(tags, name) + return self._event_api.put_tag_for_event_handler(tags, name) def remove_event_handler_tag(self, name: str, tags: List[TagAdapter]) -> None: """Remove tags from an event handler. Removes one or more tags from an event handler. 
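        One common pattern, sketched here, is to fetch the current tags first and remove them all:

        ```python
        # Clear every tag currently attached to a handler
        current_tags = event_client.get_event_handler_tags("workflow_trigger")
        if current_tags:
            event_client.remove_event_handler_tag("workflow_trigger", current_tags)
        ```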
- Parameters: - ----------- - name : str - The name of the event handler - tags : List[TagAdapter] - List of tags to remove from the event handler + Args: + name: The name of the event handler + tags: List of tags to remove from the event handler + + Returns: + None Example: - -------- - ```python - from conductor.client.adapters.models.tag_adapter import TagAdapter - - # Remove specific tags from an event handler - tags_to_remove = [ - TagAdapter(key="environment", value="production"), - TagAdapter(key="priority", value="high") - ] - - event_client.remove_event_handler_tag("workflow_trigger", tags_to_remove) - ``` + ```python + from conductor.client.adapters.models.tag_adapter import TagAdapter + + # Remove specific tags from an event handler + tags_to_remove = [ + TagAdapter(key="environment", value="production"), + TagAdapter(key="priority", value="high") + ] + + event_client.remove_event_handler_tag("workflow_trigger", tags_to_remove) + ``` """ # Note: Sync API uses (tags, name) parameter order due to swagger-codegen placing # body params before path params. Async API uses (name=name, tag=tags) instead. - return self.eventResourceApi.delete_tag_for_event_handler(tags, name) + return self._event_api.delete_tag_for_event_handler(tags, name) # Queue Configuration Operations def get_queue_configuration(self, queue_type: str, queue_name: str) -> dict: @@ -266,49 +243,173 @@ def get_queue_configuration(self, queue_type: str, queue_name: str) -> dict: Retrieves the configuration for a specific event queue. - Parameters: - ----------- - queue_type : str - The type of queue (e.g., "kafka", "sqs", "rabbitmq") - queue_name : str - The name of the queue + Args: + queue_type: The type of queue (e.g., "kafka", "sqs", "rabbitmq") + queue_name: The name of the queue Returns: - -------- - dict - Queue configuration settings + Dictionary with queue configuration settings Example: - -------- - ```python - # Get Kafka queue configuration - config = event_client.get_queue_configuration("kafka", "workflow_events") - print(f"Bootstrap servers: {config.get('bootstrapServers')}") - print(f"Topic: {config.get('topic')}") - ``` + ```python + # Get Kafka queue configuration + config = event_client.get_queue_configuration("kafka", "workflow_events") + print(f"Bootstrap servers: {config.get('bootstrapServers')}") + print(f"Topic: {config.get('topic')}") + ``` """ - return self.eventResourceApi.get_queue_config(queue_type=queue_type, queue_name=queue_name) + return self._event_api.get_queue_config(queue_type=queue_type, queue_name=queue_name) def delete_queue_configuration(self, queue_type: str, queue_name: str) -> None: """Delete queue configuration. Removes the configuration for an event queue. - Parameters: - ----------- - queue_type : str - The type of queue (e.g., "kafka", "sqs", "rabbitmq") - queue_name : str - The name of the queue + Args: + queue_type: The type of queue (e.g., "kafka", "sqs", "rabbitmq") + queue_name: The name of the queue + + Returns: + None + + Example: + ```python + # Delete a queue configuration + event_client.delete_queue_configuration("kafka", "old_workflow_events") + print("Queue configuration deleted") + ``` + """ + return self._event_api.delete_queue_config(queue_type=queue_type, queue_name=queue_name) + + def get_queue_names(self, **kwargs) -> Dict[str, str]: + """Get all queue names. + + Retrieves a dictionary of all configured queue names and their types. 
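        The returned mapping pairs naturally with `get_queue_configuration`; an illustrative sketch:

        ```python
        # Dump the configuration of every registered queue
        for queue_name, queue_type in event_client.get_queue_names().items():
            config = event_client.get_queue_configuration(queue_type, queue_name)
            print(f"{queue_type}/{queue_name}: {config}")
        ```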
+ + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + Dictionary mapping queue names to queue types + + Example: + ```python + queue_names = event_client.get_queue_names() + for name, queue_type in queue_names.items(): + print(f"Queue: {name}, Type: {queue_type}") + ``` + """ + return self._event_api.get_queue_names(**kwargs) + + def handle_incoming_event(self, body: Dict[str, object], **kwargs) -> None: + """Handle an incoming event. + + Processes an incoming event by routing it to the appropriate event handlers. + + Args: + body: Event payload as dictionary + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + # Handle a custom event + event_payload = { + "eventType": "order.placed", + "orderId": "12345", + "customerId": "cust-999", + "timestamp": 1234567890 + } + + event_client.handle_incoming_event(event_payload) + ``` + """ + return self._event_api.handle_incoming_event(body, **kwargs) + + def put_queue_config(self, body: str, queue_type: str, queue_name: str, **kwargs) -> None: + """Put a queue configuration. + + Creates or updates the configuration for an event queue. + + Args: + body: Queue configuration as JSON string + queue_type: The type of queue (e.g., "kafka", "sqs", "rabbitmq") + queue_name: The name of the queue + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + import json + + # Configure a Kafka queue + kafka_config = json.dumps({ + "bootstrapServers": "localhost:9092", + "topic": "workflow_events", + "consumerGroup": "conductor_events", + "autoCommit": True + }) + + event_client.put_queue_config(kafka_config, "kafka", "workflow_events") + ``` + """ + return self._event_api.put_queue_config(body, queue_type, queue_name, **kwargs) + + def test(self, **kwargs) -> EventHandler: + """Test the event handler. + + Tests an event handler configuration without persisting it. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + EventHandler instance with test results + + Example: + ```python + result = event_client.test() + print(f"Test result: {result}") + ``` + """ + return self._event_api.test(**kwargs) + + def test_connectivity(self, body: ConnectivityTestInput, **kwargs) -> ConnectivityTestResult: + """Test the connectivity of an event handler. + + Tests whether an event handler can successfully connect to its configured + event source (e.g., Kafka, SQS, RabbitMQ). 
+ + Args: + body: Connectivity test input with connection details + **kwargs: Additional optional parameters to pass to the API + + Returns: + ConnectivityTestResult with test status and details Example: - -------- - ```python - # Delete a queue configuration - event_client.delete_queue_configuration("kafka", "old_workflow_events") - print("Queue configuration deleted") - ``` + ```python + from conductor.client.http.models.connectivity_test_input import ConnectivityTestInput + + # Test Kafka connectivity + test_input = ConnectivityTestInput( + queue_type="kafka", + queue_name="workflow_events", + configuration={ + "bootstrapServers": "localhost:9092", + "topic": "workflow_events" + } + ) + + result = event_client.test_connectivity(test_input) + if result.success: + print("Connection successful!") + else: + print(f"Connection failed: {result.error_message}") + ``` """ - return self.eventResourceApi.delete_queue_config( - queue_type=queue_type, queue_name=queue_name - ) + return self._event_api.test_connectivity(body, **kwargs) diff --git a/src/conductor/client/orkes/orkes_integration_client.py b/src/conductor/client/orkes/orkes_integration_client.py index 7fd82ea69..70d4de53c 100644 --- a/src/conductor/client/orkes/orkes_integration_client.py +++ b/src/conductor/client/orkes/orkes_integration_client.py @@ -4,163 +4,753 @@ from conductor.client.codegen.rest import ApiException from conductor.client.configuration.configuration import Configuration +from conductor.client.http.models import MessageTemplate +from conductor.client.http.models.event_log import EventLog from conductor.client.http.models.integration import Integration from conductor.client.http.models.integration_api import IntegrationApi from conductor.client.http.models.integration_api_update import IntegrationApiUpdate from conductor.client.http.models.integration_def import IntegrationDef from conductor.client.http.models.integration_update import IntegrationUpdate -from conductor.client.http.models.prompt_template import PromptTemplate +from conductor.client.http.models.tag import Tag from conductor.client.integration_client import IntegrationClient from conductor.client.orkes.orkes_base_client import OrkesBaseClient class OrkesIntegrationClient(OrkesBaseClient, IntegrationClient): def __init__(self, configuration: Configuration): - super(OrkesIntegrationClient, self).__init__(configuration) + """Initialize the OrkesIntegrationClient with configuration. + + Args: + configuration: Configuration object containing server settings and authentication + + Example: + ```python + from conductor.client.configuration.configuration import Configuration + + config = Configuration(server_api_url="http://localhost:8080/api") + integration_client = OrkesIntegrationClient(config) + ``` + """ + super().__init__(configuration) def associate_prompt_with_integration( - self, ai_integration: str, model_name: str, prompt_name: str - ): - self.integrationApi.associate_prompt_with_integration( - ai_integration, model_name, prompt_name + self, ai_integration: str, model_name: str, prompt_name: str, **kwargs + ) -> None: + """Associate a prompt template with an AI integration. 
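        As an illustrative follow-up, the association can be verified with `get_prompts_with_integration`:

        ```python
        integration_client.associate_prompt_with_integration("openai", "gpt-4", "customer_greeting")

        # Confirm the prompt is now listed for that model
        prompts = integration_client.get_prompts_with_integration("openai", "gpt-4")
        assert any(p.name == "customer_greeting" for p in prompts)
        ```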
+ + Args: + ai_integration: Name of the AI integration provider (e.g., "openai") + model_name: Name of the model (e.g., "gpt-4") + prompt_name: Name of the prompt template + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + integration_client.associate_prompt_with_integration( + "openai", + "gpt-4", + "customer_greeting" + ) + ``` + """ + self._integration_api.associate_prompt_with_integration( + integration_provider=ai_integration, + integration_name=model_name, + prompt_name=prompt_name, + **kwargs, ) - def delete_integration_api(self, api_name: str, integration_name: str): - self.integrationApi.delete_integration_api(api_name, integration_name) + def delete_integration_api(self, api_name: str, integration_name: str, **kwargs) -> None: + """Delete an integration API configuration. - def delete_integration(self, integration_name: str): - self.integrationApi.delete_integration_provider(integration_name) + Args: + api_name: Name of the API to delete + integration_name: Name of the integration + **kwargs: Additional optional parameters to pass to the API - def get_integration_api(self, api_name: str, integration_name: str) -> Optional[IntegrationApi]: + Returns: + None + + Example: + ```python + integration_client.delete_integration_api("payment_api", "stripe") + ``` + """ + self._integration_api.delete_integration_api( + name=api_name, integration_name=integration_name, **kwargs + ) + + def delete_integration(self, integration_name: str, **kwargs) -> None: + """Delete an integration provider. + + Args: + integration_name: Name of the integration to delete + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + integration_client.delete_integration("old_stripe_integration") + ``` + """ + self._integration_api.delete_integration_provider(name=integration_name, **kwargs) + + def get_integration_api( + self, api_name: str, integration_name: str, **kwargs + ) -> Optional[IntegrationApi]: + """Get an integration API configuration by name. + + Args: + api_name: Name of the API + integration_name: Name of the integration + **kwargs: Additional optional parameters to pass to the API + + Returns: + IntegrationApi instance if found, None otherwise + + Example: + ```python + api = integration_client.get_integration_api("payment_api", "stripe") + if api: + print(f"API: {api.name}, Endpoint: {api.endpoint}") + ``` + """ try: - return self.integrationApi.get_integration_api(api_name, integration_name) + return self._integration_api.get_integration_api( + name=api_name, integration_name=integration_name, **kwargs + ) except ApiException as e: if e.is_not_found(): return None raise e - def get_integration_apis(self, integration_name: str) -> List[IntegrationApi]: - return self.integrationApi.get_integration_apis(integration_name) - - def get_integration(self, integration_name: str) -> Optional[Integration]: + def get_integration_apis(self, integration_name: str, **kwargs) -> List[IntegrationApi]: + """Get all API configurations for an integration. 
+ + Args: + integration_name: Name of the integration + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of IntegrationApi instances + + Example: + ```python + apis = integration_client.get_integration_apis("stripe") + for api in apis: + print(f"API: {api.name}") + ``` + """ + return self._integration_api.get_integration_apis(name=integration_name, **kwargs) + + def get_integration(self, integration_name: str, **kwargs) -> Optional[Integration]: + """Get an integration by name. + + Args: + integration_name: Name of the integration + **kwargs: Additional optional parameters to pass to the API + + Returns: + Integration instance if found, None otherwise + + Example: + ```python + integration = integration_client.get_integration("stripe") + if integration: + print(f"Integration: {integration.name}, Enabled: {integration.enabled}") + ``` + """ try: - return self.integrationApi.get_integration_provider(integration_name) + return self._integration_api.get_integration_provider(name=integration_name, **kwargs) except ApiException as e: if e.is_not_found(): return None raise e - def get_integrations(self) -> List[Integration]: - return self.integrationApi.get_integration_providers() - - def get_integration_provider(self, name: str) -> Optional[IntegrationDef]: - """Get integration provider by name""" + def get_integrations(self, **kwargs) -> List[Integration]: + """Get all integrations. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of Integration instances + + Example: + ```python + integrations = integration_client.get_integrations() + for integration in integrations: + print(f"Integration: {integration.name}") + ``` + """ + return self._integration_api.get_integration_providers(**kwargs) + + def get_integration_provider(self, name: str, **kwargs) -> Optional[Integration]: + """Get integration provider by name. + + Args: + name: Name of the integration provider + **kwargs: Additional optional parameters to pass to the API + + Returns: + Integration instance if found, None otherwise + + Example: + ```python + provider = integration_client.get_integration_provider("openai") + if provider: + print(f"Provider: {provider.name}, Category: {provider.category}") + ``` + """ try: - return self.integrationApi.get_integration_provider(name) + return self._integration_api.get_integration_provider(name=name, **kwargs) except ApiException as e: if e.is_not_found(): return None raise e def get_integration_providers( - self, category: Optional[str] = None, active_only: Optional[bool] = None - ) -> List[IntegrationDef]: - """Get all integration providers with optional filtering""" - kwargs = {} + self, category: Optional[str] = None, active_only: Optional[bool] = None, **kwargs + ) -> List[Integration]: + """Get all integration providers with optional filtering. 
+ + Args: + category: Optional category filter (e.g., "AI_MODEL", "DATABASE") + active_only: If True, return only active providers + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of Integration instances + + Example: + ```python + # Get all providers + all_providers = integration_client.get_integration_providers() + + # Get only AI model providers + ai_providers = integration_client.get_integration_providers(category="AI_MODEL") + + # Get only active providers + active = integration_client.get_integration_providers(active_only=True) + ``` + """ if category is not None: kwargs["category"] = category if active_only is not None: kwargs["active_only"] = active_only - return self.integrationApi.get_integration_providers(**kwargs) + return self._integration_api.get_integration_providers(**kwargs) - def get_integration_provider_defs(self) -> List[IntegrationDef]: - """Get integration provider definitions""" - return self.integrationApi.get_integration_provider_defs() + def get_integration_provider_defs(self, **kwargs) -> List[IntegrationDef]: + """Get integration provider definitions. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of IntegrationDef instances with provider schemas + + Example: + ```python + defs = integration_client.get_integration_provider_defs() + for definition in defs: + print(f"Definition: {definition.name}") + ``` + """ + return self._integration_api.get_integration_provider_defs(**kwargs) def get_prompts_with_integration( - self, ai_integration: str, model_name: str - ) -> List[PromptTemplate]: - return self.integrationApi.get_prompts_with_integration(ai_integration, model_name) - - def save_integration_api(self, integration_name, api_name, api_details: IntegrationApiUpdate): - print(f"Saving integration API: {api_name} for integration: {integration_name}") - self.integrationApi.save_integration_api( - body=api_details, name=api_name, integration_name=integration_name + self, ai_integration: str, model_name: str, **kwargs + ) -> List[MessageTemplate]: + """Get prompts associated with an AI integration. + + Args: + ai_integration: Name of the AI integration provider + model_name: Name of the model + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of MessageTemplate instances + + Example: + ```python + prompts = integration_client.get_prompts_with_integration("openai", "gpt-4") + for prompt in prompts: + print(f"Prompt: {prompt.name}") + ``` + """ + return self._integration_api.get_prompts_with_integration( + integration_provider=ai_integration, integration_name=model_name, **kwargs ) - def save_integration(self, integration_name, integration_details: IntegrationUpdate): - self.integrationApi.save_integration_provider(integration_details, integration_name) + def save_integration_api( + self, integration_name: str, api_name: str, api_details: IntegrationApiUpdate, **kwargs + ) -> None: + """Save an integration API configuration. 
+ + Args: + integration_name: Name of the integration + api_name: Name of the API + api_details: API configuration details + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.client.http.models.integration_api_update import IntegrationApiUpdate + + api_details = IntegrationApiUpdate( + endpoint="https://api.stripe.com/v1/charges", + method="POST", + headers={"Authorization": "Bearer ${api_key}"} + ) + + integration_client.save_integration_api("stripe", "create_charge", api_details) + ``` + """ + self._integration_api.save_integration_api( + body=api_details, name=api_name, integration_name=integration_name, **kwargs + ) - def save_integration_provider(self, name: str, integration_details: IntegrationUpdate) -> None: - """Create or update an integration provider""" - self.integrationApi.save_integration_provider(integration_details, name) + def save_integration( + self, integration_name: str, integration_details: IntegrationUpdate, **kwargs + ) -> None: + """Save an integration configuration. + + Args: + integration_name: Name of the integration + integration_details: Integration configuration details + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.client.http.models.integration_update import IntegrationUpdate + + integration_details = IntegrationUpdate( + category="PAYMENT", + enabled=True, + configuration={ + "api_key": "sk_test_...", + "webhook_secret": "whsec_..." + } + ) + + integration_client.save_integration("stripe", integration_details) + ``` + """ + self._integration_api.save_integration_provider( + body=integration_details, name=integration_name, **kwargs + ) - def get_token_usage_for_integration(self, name, integration_name) -> int: - return self.integrationApi.get_token_usage_for_integration(name, integration_name) + def save_integration_provider( + self, name: str, integration_details: IntegrationUpdate, **kwargs + ) -> None: + """Create or update an integration provider. + + Args: + name: Name of the integration provider + integration_details: Integration configuration details + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.client.http.models.integration_update import IntegrationUpdate + + integration_details = IntegrationUpdate( + category="AI_MODEL", + type="openai", + enabled=True, + configuration={ + "apiKey": "sk-...", + "model": "gpt-4" + } + ) + + integration_client.save_integration_provider("my-openai", integration_details) + ``` + """ + self._integration_api.save_integration_provider( + body=integration_details, name=name, **kwargs + ) - def get_token_usage_for_integration_provider(self, name) -> dict: - return self.integrationApi.get_token_usage_for_integration_provider(name) + def get_token_usage_for_integration(self, name: str, integration_name: str, **kwargs) -> int: + """Get token usage for a specific integration. 
+ + Args: + name: Name of the integration provider + integration_name: Name of the specific integration + **kwargs: Additional optional parameters to pass to the API + + Returns: + Token usage count + + Example: + ```python + usage = integration_client.get_token_usage_for_integration("openai", "gpt-4") + print(f"Tokens used: {usage}") + ``` + """ + return self._integration_api.get_token_usage_for_integration( + name=name, integration_name=integration_name, **kwargs + ) - def register_token_usage(self, body, name, integration_name): - return self.integrationApi.register_token_usage(body, name, integration_name) + def get_token_usage_for_integration_provider(self, name: str, **kwargs) -> Dict[str, str]: + """Get token usage for an integration provider. + + Args: + name: Name of the integration provider + **kwargs: Additional optional parameters to pass to the API + + Returns: + Dictionary mapping integration names to token usage + + Example: + ```python + usage = integration_client.get_token_usage_for_integration_provider("openai") + for integration, tokens in usage.items(): + print(f"{integration}: {tokens} tokens") + ``` + """ + return self._integration_api.get_token_usage_for_integration_provider(name=name, **kwargs) + + def register_token_usage(self, body: int, name: str, integration_name: str, **kwargs) -> None: + """Register token usage for an integration. + + Args: + body: Number of tokens used + name: Name of the integration provider + integration_name: Name of the specific integration + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + # Record 1500 tokens used + integration_client.register_token_usage(1500, "openai", "gpt-4") + ``` + """ + return self._integration_api.register_token_usage( + body=body, name=name, integration_name=integration_name, **kwargs + ) # Tags - def delete_tag_for_integration(self, body, tag_name, integration_name): - return self.integrationApi.delete_tag_for_integration(body, tag_name, integration_name) + def delete_tag_for_integration( + self, body: List[Tag], tag_name: str, integration_name: str, **kwargs + ) -> None: + """Delete tags for a specific integration. + + Args: + body: List of tags to delete + tag_name: Name of the tag + integration_name: Name of the integration + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.client.http.models.tag import Tag + + tags = [Tag(key="environment", value="staging")] + integration_client.delete_tag_for_integration(tags, "env_tag", "stripe") + ``` + """ + return self._integration_api.delete_tag_for_integration( + body=body, name=tag_name, integration_name=integration_name, **kwargs + ) - def delete_tag_for_integration_provider(self, body, name): - return self.integrationApi.delete_tag_for_integration_provider(body, name) + def delete_tag_for_integration_provider(self, body: List[Tag], name: str, **kwargs) -> None: + """Delete tags for an integration provider. 
- def put_tag_for_integration(self, body, name, integration_name): - return self.integrationApi.put_tag_for_integration(body, name, integration_name) + Args: + body: List of tags to delete + name: Name of the integration provider + **kwargs: Additional optional parameters to pass to the API - def put_tag_for_integration_provider(self, body, name): - return self.integrationApi.put_tag_for_integration_provider(body, name) + Returns: + None - def get_tags_for_integration(self, name, integration_name): - return self.integrationApi.get_tags_for_integration(name, integration_name) + Example: + ```python + from conductor.client.http.models.tag import Tag - def get_tags_for_integration_provider(self, name): - return self.integrationApi.get_tags_for_integration_provider(name) + tags = [Tag(key="category", value="deprecated")] + integration_client.delete_tag_for_integration_provider(tags, "openai") + ``` + """ + return self._integration_api.delete_tag_for_integration_provider( + body=body, name=name, **kwargs + ) - # Utility Methods for Integration Provider Management - def get_integration_provider_by_category( - self, category: str, active_only: bool = True - ) -> List[IntegrationDef]: - """Get integration providers filtered by category""" - return self.get_integration_providers(category=category, active_only=active_only) + def put_tag_for_integration( + self, body: List[Tag], name: str, integration_name: str, **kwargs + ) -> None: + """Set tags for a specific integration. + + Args: + body: List of tags to set + name: Name of the integration provider + integration_name: Name of the integration + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.client.http.models.tag import Tag + + tags = [ + Tag(key="environment", value="production"), + Tag(key="team", value="payments") + ] + integration_client.put_tag_for_integration(tags, "stripe", "payment_processor") + ``` + """ + return self._integration_api.put_tag_for_integration( + body=body, name=name, integration_name=integration_name, **kwargs + ) - def get_active_integration_providers(self) -> List[IntegrationDef]: - """Get only active integration providers""" - return self.get_integration_providers(active_only=True) + def put_tag_for_integration_provider(self, body: List[Tag], name: str, **kwargs) -> None: + """Set tags for an integration provider. + + Args: + body: List of tags to set + name: Name of the integration provider + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.client.http.models.tag import Tag + + tags = [ + Tag(key="category", value="ai_model"), + Tag(key="priority", value="high") + ] + integration_client.put_tag_for_integration_provider(tags, "openai") + ``` + """ + return self._integration_api.put_tag_for_integration_provider( + body=body, name=name, **kwargs + ) + + def get_tags_for_integration(self, name: str, integration_name: str, **kwargs) -> List[Tag]: + """Get tags for a specific integration. 
+ + Args: + name: Name of the integration provider + integration_name: Name of the integration + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of Tag instances + + Example: + ```python + tags = integration_client.get_tags_for_integration("stripe", "payment_processor") + for tag in tags: + print(f"Tag: {tag.key}={tag.value}") + ``` + """ + return self._integration_api.get_tags_for_integration( + name=name, integration_name=integration_name, **kwargs + ) + + def get_tags_for_integration_provider(self, name: str, **kwargs) -> List[Tag]: + """Get tags for an integration provider. + + Args: + name: Name of the integration provider + **kwargs: Additional optional parameters to pass to the API - def get_integration_available_apis(self, name: str) -> List[IntegrationApi]: - """Get available APIs for an integration""" - return self.integrationApi.get_integration_available_apis(name) + Returns: + List of Tag instances - def save_all_integrations(self, request_body: List[IntegrationUpdate]) -> None: - """Save all integrations""" - self.integrationApi.save_all_integrations(request_body) + Example: + ```python + tags = integration_client.get_tags_for_integration_provider("openai") + for tag in tags: + print(f"Tag: {tag.key}={tag.value}") + ``` + """ + return self._integration_api.get_tags_for_integration_provider(name=name, **kwargs) + + # Utility Methods for Integration Provider Management + def get_integration_provider_by_category( + self, category: str, active_only: bool = True, **kwargs + ) -> List[Integration]: + """Get integration providers filtered by category. + + Args: + category: Category to filter by (e.g., "AI_MODEL", "DATABASE", "PAYMENT") + active_only: If True, return only active providers + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of Integration instances + + Example: + ```python + # Get all active AI model providers + ai_providers = integration_client.get_integration_provider_by_category("AI_MODEL") + ``` + """ + return self.get_integration_providers(category=category, active_only=active_only, **kwargs) + + def get_active_integration_providers(self, **kwargs) -> List[Integration]: + """Get only active integration providers. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of Integration instances + + Example: + ```python + active_providers = integration_client.get_active_integration_providers() + print(f"Found {len(active_providers)} active providers") + ``` + """ + return self.get_integration_providers(active_only=True, **kwargs) + + def get_integration_available_apis(self, name: str, **kwargs) -> List[str]: + """Get available APIs for an integration. + + Args: + name: Name of the integration + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of available API names + + Example: + ```python + apis = integration_client.get_integration_available_apis("stripe") + print(f"Available APIs: {', '.join(apis)}") + ``` + """ + return self._integration_api.get_integration_available_apis(name=name, **kwargs) + + def save_all_integrations(self, request_body: List[Integration], **kwargs) -> None: + """Save multiple integrations in bulk. 
+ + Args: + request_body: List of Integration instances to save + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + integrations = [ + Integration(name="stripe", category="PAYMENT", enabled=True), + Integration(name="openai", category="AI_MODEL", enabled=True) + ] + integration_client.save_all_integrations(integrations) + ``` + """ + self._integration_api.save_all_integrations(body=request_body, **kwargs) def get_all_integrations( - self, category: Optional[str] = None, active_only: Optional[bool] = None + self, category: Optional[str] = None, active_only: Optional[bool] = None, **kwargs ) -> List[Integration]: - """Get all integrations with optional filtering""" - kwargs = {} + """Get all integrations with optional filtering. + + Args: + category: Optional category filter + active_only: If True, return only active integrations + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of Integration instances + + Example: + ```python + # Get all integrations + all_integrations = integration_client.get_all_integrations() + + # Get only active payment integrations + payments = integration_client.get_all_integrations( + category="PAYMENT", + active_only=True + ) + ``` + """ if category is not None: kwargs["category"] = category if active_only is not None: kwargs["active_only"] = active_only - return self.integrationApi.get_all_integrations(**kwargs) + return self._integration_api.get_all_integrations(**kwargs) def get_providers_and_integrations( - self, integration_type: Optional[str] = None, active_only: Optional[bool] = None - ) -> Dict[str, object]: - """Get providers and integrations together""" - kwargs = {} + self, integration_type: Optional[str] = None, active_only: Optional[bool] = None, **kwargs + ) -> List[str]: + """Get providers and integrations together. + + Args: + integration_type: Optional integration type filter + active_only: If True, return only active items + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of provider and integration names + + Example: + ```python + all_items = integration_client.get_providers_and_integrations() + print(f"Total providers and integrations: {len(all_items)}") + ``` + """ if integration_type is not None: kwargs["type"] = integration_type if active_only is not None: kwargs["active_only"] = active_only - return self.integrationApi.get_providers_and_integrations(**kwargs) + return self._integration_api.get_providers_and_integrations(**kwargs) + + def record_event_stats(self, body: List[EventLog], type: str, **kwargs) -> None: + """Record event statistics for integrations. 
+ + Args: + body: List of event logs to record + type: Type of event + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.client.http.models.event_log import EventLog + + event_logs = [ + EventLog( + event_type="API_CALL", + integration_name="stripe", + timestamp=1234567890 + ) + ] + integration_client.record_event_stats(event_logs, "API_USAGE") + ``` + """ + self._integration_api.record_event_stats(body=body, type=type, **kwargs) diff --git a/src/conductor/client/orkes/orkes_metadata_client.py b/src/conductor/client/orkes/orkes_metadata_client.py index e206eb9d1..af0ad9a8d 100644 --- a/src/conductor/client/orkes/orkes_metadata_client.py +++ b/src/conductor/client/orkes/orkes_metadata_client.py @@ -2,7 +2,14 @@ from typing import List, Optional +from deprecated import deprecated +from typing_extensions import deprecated as typing_deprecated + from conductor.client.configuration.configuration import Configuration +from conductor.client.http.models.extended_task_def import ExtendedTaskDef +from conductor.client.http.models.extended_workflow_def import ExtendedWorkflowDef +from conductor.client.http.models.incoming_bpmn_file import IncomingBpmnFile +from conductor.client.http.models.tag import Tag from conductor.client.http.models.tag_string import TagString from conductor.client.http.models.task_def import TaskDef from conductor.client.http.models.workflow_def import WorkflowDef @@ -14,85 +21,752 @@ class OrkesMetadataClient(OrkesBaseClient, MetadataClient): def __init__(self, configuration: Configuration): - super(OrkesMetadataClient, self).__init__(configuration) + """Initialize the OrkesMetadataClient with configuration. + + Args: + configuration: Configuration object containing server settings and authentication + + Example: + ```python + from conductor.client.configuration.configuration import Configuration + + config = Configuration(server_api_url="http://localhost:8080/api") + metadata_client = OrkesMetadataClient(config) + ``` + """ + super().__init__(configuration) + + @deprecated("register_workflow_def is deprecated; use register_workflow_def_validated instead") + @typing_deprecated( + "register_workflow_def is deprecated; use register_workflow_def_validated instead" + ) + def register_workflow_def( # type: ignore[override] + self, workflow_def: ExtendedWorkflowDef, overwrite: Optional[bool] = True + ) -> object: + """Register a workflow definition. + + .. deprecated:: + Use register_workflow_def_validated() instead. + + Args: + workflow_def: Workflow definition to register + overwrite: If True, overwrite existing definition + + Returns: + Response object + """ + return self._metadata_api.create(workflow_def, overwrite=overwrite) + + def register_workflow_def_validated( + self, + workflow_def: ExtendedWorkflowDef, + overwrite: bool = True, + new_version: bool = True, + **kwargs, + ) -> None: + """Register a workflow definition. 
+ + Args: + workflow_def: Workflow definition to register + overwrite: If True, overwrite existing definition + new_version: If True, create a new version + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.client.http.models.extended_workflow_def import ExtendedWorkflowDef + from conductor.client.http.models.workflow_task import WorkflowTask + + workflow_def = ExtendedWorkflowDef( + name="order_workflow", + version=1, + description="Order processing workflow", + tasks=[ + WorkflowTask( + name="validate_order", + task_reference_name="validate", + type="SIMPLE" + ) + ] + ) + + metadata_client.register_workflow_def_validated(workflow_def) + ``` + """ + self._metadata_api.create( + body=workflow_def, overwrite=overwrite, new_version=new_version, **kwargs + ) + + @deprecated("update_workflow_def is deprecated; use update_workflow_def_validated instead") + @typing_deprecated( + "update_workflow_def is deprecated; use update_workflow_def_validated instead" + ) + def update_workflow_def( # type: ignore[override] + self, + workflow_def: ExtendedWorkflowDef, + overwrite: Optional[bool] = True, + **kwargs, + ) -> object: + """Update a workflow definition. + + .. deprecated:: + Use update_workflow_def_validated() instead. + + Args: + workflow_def: Workflow definition to update + overwrite: If True, overwrite existing definition + **kwargs: Additional optional parameters to pass to the API + + Returns: + Response object + """ + return self._metadata_api.update([workflow_def], overwrite=overwrite, **kwargs) - def register_workflow_def(self, workflow_def: WorkflowDef, overwrite: Optional[bool] = True): - self.metadataResourceApi.create(workflow_def, overwrite=overwrite) + def update_workflow_def_validated( + self, + workflow_def: ExtendedWorkflowDef, + overwrite: bool = True, + new_version: bool = True, + **kwargs, + ) -> None: + """Update a workflow definition. - def update_workflow_def(self, workflow_def: WorkflowDef, overwrite: Optional[bool] = True): - self.metadataResourceApi.update([workflow_def], overwrite=overwrite) + Args: + workflow_def: Workflow definition to update + overwrite: If True, overwrite existing definition + new_version: If True, create a new version + **kwargs: Additional optional parameters to pass to the API - def unregister_workflow_def(self, name: str, version: int): - self.metadataResourceApi.unregister_workflow_def(name, version) + Returns: + None - def get_workflow_def(self, name: str, version: Optional[int] = None) -> WorkflowDef: + Example: + ```python + workflow_def = metadata_client.get_workflow_def("order_workflow", 1) + workflow_def.description = "Updated description" + + metadata_client.update_workflow_def_validated(workflow_def) + ``` + """ + self._metadata_api.update( + body=[workflow_def], overwrite=overwrite, new_version=new_version, **kwargs + ) + + def unregister_workflow_def(self, name: str, version: int, **kwargs) -> None: + """Unregister a workflow definition by name and version. + + Args: + name: Name of the workflow + version: Version number + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + metadata_client.unregister_workflow_def("old_workflow", 1) + ``` + """ + self._metadata_api.unregister_workflow_def(name=name, version=version, **kwargs) + + def get_workflow_def(self, name: str, version: Optional[int] = None, **kwargs) -> WorkflowDef: + """Get a workflow definition by name and optional version. 
+ + Args: + name: Name of the workflow + version: Optional version number. If None, gets latest version + **kwargs: Additional optional parameters to pass to the API + + Returns: + WorkflowDef instance + + Example: + ```python + # Get latest version + workflow = metadata_client.get_workflow_def("order_workflow") + + # Get specific version + workflow = metadata_client.get_workflow_def("order_workflow", 2) + print(f"Workflow: {workflow.name}, Version: {workflow.version}") + ``` + """ workflow = None if version: - workflow = self.metadataResourceApi.get1(name, version=version) + workflow = self._metadata_api.get1(name=name, version=version, **kwargs) else: - workflow = self.metadataResourceApi.get1(name) + workflow = self._metadata_api.get1(name=name, **kwargs) return workflow - def get_all_workflow_defs(self) -> List[WorkflowDef]: - return self.metadataResourceApi.get_workflow_defs() + def get_all_workflow_defs(self, **kwargs) -> List[WorkflowDef]: + """Get all workflow definitions. - def register_task_def(self, task_def: TaskDef): - self.metadataResourceApi.register_task_def([task_def]) + Args: + **kwargs: Additional optional parameters to pass to the API - def update_task_def(self, task_def: TaskDef): - self.metadataResourceApi.update_task_def(task_def) + Returns: + List of WorkflowDef instances - def unregister_task_def(self, task_type: str): - self.metadataResourceApi.unregister_task_def(task_type) + Example: + ```python + workflows = metadata_client.get_all_workflow_defs() + for workflow in workflows: + print(f"Workflow: {workflow.name}, Version: {workflow.version}") + ``` + """ + return self._metadata_api.get_workflow_defs(**kwargs) - def get_task_def(self, task_type: str) -> TaskDef: - return self.metadataResourceApi.get_task_def(task_type) + @deprecated("register_task_def is deprecated; use register_task_def_validated instead") + @typing_deprecated("register_task_def is deprecated; use register_task_def_validated instead") + def register_task_def(self, task_def: ExtendedTaskDef) -> object: # type: ignore[override] + """Register a task definition. - def get_all_task_defs(self) -> List[TaskDef]: - return self.metadataResourceApi.get_task_defs() + .. deprecated:: + Use register_task_def_validated() instead. - def add_workflow_tag(self, tag: MetadataTag, workflow_name: str): - self.tagsApi.add_workflow_tag(tag, workflow_name) + Args: + task_def: Task definition to register - def delete_workflow_tag(self, tag: MetadataTag, workflow_name: str): - tagStr = TagString(tag.key, tag.type, tag.value) - self.tagsApi.delete_workflow_tag(tagStr, workflow_name) + Returns: + Response object + """ + return self._metadata_api.register_task_def([task_def]) + + def register_task_def_validated(self, task_def: List[ExtendedTaskDef], **kwargs) -> None: + """Register one or more task definitions. + + Args: + task_def: List of task definitions to register + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.client.http.models.extended_task_def import ExtendedTaskDef + + task_defs = [ + ExtendedTaskDef( + name="validate_order", + description="Validate order details", + timeout_seconds=60 + ), + ExtendedTaskDef( + name="process_payment", + description="Process payment", + timeout_seconds=120 + ) + ] + + metadata_client.register_task_def_validated(task_defs) + ``` + """ + self._metadata_api.register_task_def(body=task_def, **kwargs) + + def update_task_def(self, task_def: TaskDef, **kwargs) -> None: + """Update a task definition. 
+ + Args: + task_def: Task definition to update + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + task_def = metadata_client.get_task_def_validated("validate_order") + task_def.timeout_seconds = 90 + + metadata_client.update_task_def(task_def) + ``` + """ + self._metadata_api.update_task_def(body=task_def, **kwargs) + + def unregister_task_def(self, task_type: str, **kwargs) -> None: + """Unregister a task definition by task type. + + Args: + task_type: Type of task to unregister + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + metadata_client.unregister_task_def("old_task_type") + ``` + """ + self._metadata_api.unregister_task_def(tasktype=task_type, **kwargs) + + @deprecated("get_task_def is deprecated; use get_task_def_validated instead") + @typing_deprecated("get_task_def is deprecated; use get_task_def_validated instead") + def get_task_def(self, task_type: str) -> object: # type: ignore[override] + """Get a task definition by task type. + + .. deprecated:: + Use get_task_def_validated() instead. + + Args: + task_type: Type of task to retrieve + + Returns: + Response object + """ + return self._metadata_api.get_task_def(task_type) + + def get_task_def_validated(self, task_type: str, **kwargs) -> TaskDef: + """Get a task definition by task type. + + Args: + task_type: Type of task to retrieve + **kwargs: Additional optional parameters to pass to the API + + Returns: + TaskDef instance + + Example: + ```python + task_def = metadata_client.get_task_def_validated("validate_order") + print(f"Task: {task_def.name}, Timeout: {task_def.timeout_seconds}s") + ``` + """ + task_def = self._metadata_api.get_task_def(tasktype=task_type, **kwargs) + return self.api_client.deserialize_class(task_def, "TaskDef") + + def get_all_task_defs(self, **kwargs) -> List[TaskDef]: + """Get all task definitions. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of TaskDef instances + + Example: + ```python + task_defs = metadata_client.get_all_task_defs() + for task_def in task_defs: + print(f"Task: {task_def.name}") + ``` + """ + return self._metadata_api.get_task_defs(**kwargs) + + def add_workflow_tag(self, tag: MetadataTag, workflow_name: str, **kwargs) -> None: + """Add a tag to a workflow. + + Args: + tag: Tag to add + workflow_name: Name of the workflow + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.client.orkes.models.metadata_tag import MetadataTag + + tag = MetadataTag(key="environment", type="METADATA", value="production") + metadata_client.add_workflow_tag(tag, "order_workflow") + ``` + """ + self._tags_api.add_workflow_tag(body=tag, name=workflow_name, **kwargs) + + def delete_workflow_tag(self, tag: MetadataTag, workflow_name: str, **kwargs) -> None: + """Delete a tag from a workflow. 
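+
+        The tag's key, type, and value are all used to identify the tag to remove.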
+ + Args: + tag: Tag to delete + workflow_name: Name of the workflow + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.client.orkes.models.metadata_tag import MetadataTag + + tag = MetadataTag(key="environment", type="METADATA", value="staging") + metadata_client.delete_workflow_tag(tag, "order_workflow") + ``` + """ + tag_str = TagString(tag.key, tag.type, tag.value) + self._tags_api.delete_workflow_tag(body=tag_str, name=workflow_name, **kwargs) + + @deprecated("get_workflow_tags is deprecated; use get_workflow_tags_validated instead") + @typing_deprecated("get_workflow_tags is deprecated; use get_workflow_tags_validated instead") + def get_workflow_tags(self, workflow_name: str) -> object: # type: ignore[override] + """Get tags for a workflow. + + .. deprecated:: + Use get_workflow_tags_validated() instead. + + Args: + workflow_name: Name of the workflow + + Returns: + Response object + """ + return self._tags_api.get_workflow_tags(workflow_name) - def get_workflow_tags(self, workflow_name: str) -> List[MetadataTag]: - return self.tagsApi.get_workflow_tags(workflow_name) + def get_workflow_tags_validated(self, workflow_name: str, **kwargs) -> List[Tag]: + """Get tags for a workflow. - def set_workflow_tags(self, tags: List[MetadataTag], workflow_name: str): - self.tagsApi.set_workflow_tags(tags, workflow_name) + Args: + workflow_name: Name of the workflow + **kwargs: Additional optional parameters to pass to the API + Returns: + List of Tag instances + + Example: + ```python + tags = metadata_client.get_workflow_tags_validated("order_workflow") + for tag in tags: + print(f"Tag: {tag.key}={tag.value}") + ``` + """ + tags = self._tags_api.get_workflow_tags(name=workflow_name, **kwargs) + return self.api_client.deserialize_class(tags, "List[Tag]") + + def set_workflow_tags(self, tags: List[MetadataTag], workflow_name: str, **kwargs) -> None: + """Set tags for a workflow, replacing any existing tags. + + Args: + tags: List of tags to set + workflow_name: Name of the workflow + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.client.orkes.models.metadata_tag import MetadataTag + + tags = [ + MetadataTag(key="environment", type="METADATA", value="production"), + MetadataTag(key="team", type="METADATA", value="platform") + ] + metadata_client.set_workflow_tags(tags, "order_workflow") + ``` + """ + self._tags_api.set_workflow_tags(body=tags, name=workflow_name, **kwargs) + + @deprecated("addTaskTag is deprecated; use add_task_tag instead") + @typing_deprecated("addTaskTag is deprecated; use add_task_tag instead") def addTaskTag(self, tag: MetadataTag, taskName: str): - self.tagsApi.add_task_tag(tag, taskName) + """Add a tag to a task. + + .. deprecated:: + Use add_task_tag() instead. + + Args: + tag: Tag to add + taskName: Name of the task + """ + self._tags_api.add_task_tag(tag, taskName) + def add_task_tag(self, tag: MetadataTag, task_name: str, **kwargs) -> None: + """Add a tag to a task. 
+ + Args: + tag: Tag to add + task_name: Name of the task + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.client.orkes.models.metadata_tag import MetadataTag + + tag = MetadataTag(key="priority", type="METADATA", value="high") + metadata_client.add_task_tag(tag, "process_payment") + ``` + """ + tag_str = TagString(tag.key, tag.type, tag.value) + self._tags_api.add_task_tag(tag=tag_str, task_name=task_name, **kwargs) + + @deprecated("deleteTaskTag is deprecated; use delete_task_tag instead") + @typing_deprecated("deleteTaskTag is deprecated; use delete_task_tag instead") def deleteTaskTag(self, tag: MetadataTag, taskName: str): + """Delete a tag from a task. + + .. deprecated:: + Use delete_task_tag() instead. + + Args: + tag: Tag to delete + taskName: Name of the task + """ tagStr = TagString(tag.key, tag.type, tag.value) - self.tagsApi.delete_task_tag(tagStr, taskName) + self._tags_api.delete_task_tag(tagStr, taskName) + + def delete_task_tag(self, tag: MetadataTag, task_name: str, **kwargs) -> None: + """Delete a tag from a task. + + Args: + tag: Tag to delete + task_name: Name of the task + **kwargs: Additional optional parameters to pass to the API - def getTaskTags(self, taskName: str) -> List[MetadataTag]: - return self.tagsApi.get_task_tags(taskName) + Returns: + None + Example: + ```python + from conductor.client.orkes.models.metadata_tag import MetadataTag + + tag = MetadataTag(key="priority", type="METADATA", value="low") + metadata_client.delete_task_tag(tag, "process_payment") + ``` + """ + tag_str = TagString(tag.key, tag.type, tag.value) + self._tags_api.delete_task_tag(tag=tag_str, task_name=task_name, **kwargs) + + @deprecated("getTaskTags is deprecated; use get_task_tags instead") + @typing_deprecated("getTaskTags is deprecated; use get_task_tags instead") + def getTaskTags(self, taskName: str) -> object: + """Get tags for a task. + + .. deprecated:: + Use get_task_tags() instead. + + Args: + taskName: Name of the task + + Returns: + Response object + """ + return self._tags_api.get_task_tags(taskName) + + def get_task_tags(self, task_name: str, **kwargs) -> List[MetadataTag]: + """Get tags for a task. + + Args: + task_name: Name of the task + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of MetadataTag instances + + Example: + ```python + tags = metadata_client.get_task_tags("process_payment") + for tag in tags: + print(f"Tag: {tag.key}={tag.value}") + ``` + """ + tags = self._tags_api.get_task_tags(task_name=task_name, **kwargs) + result = self.api_client.deserialize_class(tags, "List[Tag]") + return result + + @deprecated("setTaskTags is deprecated; use set_task_tags instead") + @typing_deprecated("setTaskTags is deprecated; use set_task_tags instead") def setTaskTags(self, tags: List[MetadataTag], taskName: str): - self.tagsApi.set_task_tags(tags, taskName) + """Set tags for a task. + + .. deprecated:: + Use set_task_tags() instead. + + Args: + tags: List of tags to set + taskName: Name of the task + """ + self._tags_api.set_task_tags(tags, taskName) + + def set_task_tags(self, tags: List[MetadataTag], task_name: str, **kwargs) -> None: + """Set tags for a task, replacing any existing tags. 
+ + Args: + tags: List of tags to set + task_name: Name of the task + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.client.orkes.models.metadata_tag import MetadataTag + + tags = [ + MetadataTag(key="priority", type="METADATA", value="high"), + MetadataTag(key="category", type="METADATA", value="payment") + ] + metadata_client.set_task_tags(tags, "process_payment") + ``` + """ + self._tags_api.set_task_tags(body=tags, task_name=task_name, **kwargs) + @deprecated("setWorkflowRateLimit is deprecated; use set_workflow_rate_limit instead") + @typing_deprecated("setWorkflowRateLimit is deprecated; use set_workflow_rate_limit instead") def setWorkflowRateLimit(self, rateLimit: int, workflowName: str): - self.removeWorkflowRateLimit(workflowName) + """Set rate limit for a workflow. + + .. deprecated:: + Use set_workflow_rate_limit() instead. + + Args: + rateLimit: Rate limit value + workflowName: Name of the workflow + """ + self.remove_workflow_rate_limit(workflow_name=workflowName) rateLimitTag = RateLimitTag(workflowName, rateLimit) - self.tagsApi.add_workflow_tag(rateLimitTag, workflowName) + self._tags_api.add_workflow_tag(rateLimitTag, workflowName) + + def set_workflow_rate_limit(self, rate_limit: int, workflow_name: str, **kwargs) -> None: + """Set rate limit for a workflow. + + Args: + rate_limit: Rate limit value (executions per time window) + workflow_name: Name of the workflow + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + Example: + ```python + # Limit to 10 concurrent executions + metadata_client.set_workflow_rate_limit(10, "order_workflow") + ``` + """ + self.remove_workflow_rate_limit(workflow_name) + rate_limit_tag = RateLimitTag(workflow_name, rate_limit) + self._tags_api.add_workflow_tag(tag=rate_limit_tag, name=workflow_name, **kwargs) + + @deprecated("getWorkflowRateLimit is deprecated; use get_workflow_rate_limit instead") + @typing_deprecated("getWorkflowRateLimit is deprecated; use get_workflow_rate_limit instead") def getWorkflowRateLimit(self, workflowName: str) -> Optional[int]: - tags = self.tagsApi.get_workflow_tags(workflowName) + """Get rate limit for a workflow. + + .. deprecated:: + Use get_workflow_rate_limit() instead. + + Args: + workflowName: Name of the workflow + + Returns: + Rate limit value or None + """ + tags = self._tags_api.get_workflow_tags(workflowName) for tag in tags: if tag.type == "RATE_LIMIT" and tag.key == workflowName: return tag.value return None - def removeWorkflowRateLimit(self, workflowName: str): + def get_workflow_rate_limit(self, workflow_name: str, **kwargs) -> Optional[int]: + """Get rate limit for a workflow. + + Args: + workflow_name: Name of the workflow + **kwargs: Additional optional parameters to pass to the API + + Returns: + Rate limit value or None if not set + + Example: + ```python + limit = metadata_client.get_workflow_rate_limit("order_workflow") + if limit: + print(f"Rate limit: {limit} concurrent executions") + ``` + """ + tags = self._tags_api.get_workflow_tags(name=workflow_name, **kwargs) + for tag in tags: + if tag.type == "RATE_LIMIT" and tag.key == workflow_name: + return tag.value + + return None + + @deprecated("removeWorkflowRateLimit is deprecated; use remove_workflow_rate_limit instead") + @typing_deprecated( + "removeWorkflowRateLimit is deprecated; use remove_workflow_rate_limit instead" + ) + def removeWorkflowRateLimit(self, workflowName: str) -> None: + """Remove rate limit for a workflow. 
+ + .. deprecated:: + Use remove_workflow_rate_limit() instead. + + Args: + workflowName: Name of the workflow + """ current_rate_limit = self.getWorkflowRateLimit(workflowName) if current_rate_limit: rateLimitTag = RateLimitTag(workflowName, current_rate_limit) - self.tagsApi.delete_workflow_tag(rateLimitTag, workflowName) + self._tags_api.delete_workflow_tag(rateLimitTag, workflowName) + + def remove_workflow_rate_limit(self, workflow_name: str, **kwargs) -> None: + """Remove rate limit for a workflow. + + Args: + workflow_name: Name of the workflow + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + metadata_client.remove_workflow_rate_limit("order_workflow") + ``` + """ + current_rate_limit = self.get_workflow_rate_limit(workflow_name=workflow_name, **kwargs) + if current_rate_limit: + rate_limit_tag = RateLimitTag(workflow_name, current_rate_limit) + self._tags_api.delete_workflow_tag(tag=rate_limit_tag, name=workflow_name, **kwargs) + + def upload_definitions_to_s3(self, **kwargs) -> None: + """Upload workflow and task definitions to S3. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + metadata_client.upload_definitions_to_s3() + ``` + """ + self._metadata_api.upload_workflows_and_tasks_definitions_to_s3(**kwargs) + + def upload_bpmn_file(self, body: IncomingBpmnFile, **kwargs) -> List[ExtendedWorkflowDef]: + """Upload a BPMN file and convert it to workflow definitions. + + Args: + body: BPMN file content + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of ExtendedWorkflowDef instances created from the BPMN file + + Example: + ```python + from conductor.client.http.models.incoming_bpmn_file import IncomingBpmnFile + + with open("workflow.bpmn", "r") as f: + bpmn_content = f.read() + + bpmn_file = IncomingBpmnFile(content=bpmn_content) + workflow_defs = metadata_client.upload_bpmn_file(bpmn_file) + + for workflow_def in workflow_defs: + print(f"Created workflow: {workflow_def.name}") + ``` + """ + return self._metadata_api.upload_bpmn_file(body=body, **kwargs) diff --git a/src/conductor/client/orkes/orkes_prompt_client.py b/src/conductor/client/orkes/orkes_prompt_client.py index 7ad8499ad..9d76662b4 100644 --- a/src/conductor/client/orkes/orkes_prompt_client.py +++ b/src/conductor/client/orkes/orkes_prompt_client.py @@ -4,42 +4,180 @@ from conductor.client.codegen.rest import ApiException from conductor.client.configuration.configuration import Configuration -from conductor.client.http.models.prompt_template import PromptTemplate +from conductor.client.http.models.message_template import MessageTemplate from conductor.client.http.models.prompt_template_test_request import PromptTemplateTestRequest -from conductor.client.orkes.models.metadata_tag import MetadataTag +from conductor.client.http.models.tag import Tag from conductor.client.orkes.orkes_base_client import OrkesBaseClient from conductor.client.prompt_client import PromptClient class OrkesPromptClient(OrkesBaseClient, PromptClient): def __init__(self, configuration: Configuration): - super(OrkesPromptClient, self).__init__(configuration) + """Initialize the OrkesPromptClient with configuration. 
- def save_prompt(self, prompt_name: str, description: str, prompt_template: str): - self.promptApi.save_message_template(prompt_template, description, prompt_name) + Args: + configuration: Configuration object containing server settings and authentication - def get_prompt(self, prompt_name: str) -> Optional[PromptTemplate]: + Example: + ```python + from conductor.client.configuration.configuration import Configuration + + config = Configuration(server_api_url="http://localhost:8080/api") + prompt_client = OrkesPromptClient(config) + ``` + """ + super().__init__(configuration) + + def save_prompt( + self, prompt_name: str, description: str, prompt_template: str, **kwargs + ) -> None: + """Create or update a prompt template. + + Args: + prompt_name: Unique name for the template + description: Human-readable description of the template's purpose + prompt_template: Template text with variables in ${variable} format + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + # Create a customer service template + prompt_client.save_prompt( + "customer_greeting", + "Greeting template for customer service", + "Hello ${customer_name}, welcome to ${company}! How can I help you today?" + ) + ``` + """ + self._prompt_api.save_message_template( + body=prompt_template, description=description, name=prompt_name, **kwargs + ) + + def get_prompt(self, prompt_name: str, **kwargs) -> Optional[MessageTemplate]: + """Get a prompt template by name. + + Args: + prompt_name: Name of the template to retrieve + **kwargs: Additional optional parameters to pass to the API + + Returns: + MessageTemplate instance if found, None otherwise + + Example: + ```python + template = prompt_client.get_prompt("customer_greeting") + if template: + print(f"Template: {template.template}") + ``` + """ try: - return self.promptApi.get_message_template(prompt_name) + return self._prompt_api.get_message_template(name=prompt_name, **kwargs) except ApiException as e: if e.is_not_found(): return None raise e - def get_prompts(self): - return self.promptApi.get_message_templates() + def get_prompts(self, **kwargs) -> List[MessageTemplate]: + """Get all prompt templates. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of MessageTemplate instances + + Example: + ```python + templates = prompt_client.get_prompts() + for template in templates: + print(f"Template: {template.name}, Description: {template.description}") + ``` + """ + return self._prompt_api.get_message_templates(**kwargs) + + def delete_prompt(self, prompt_name: str, **kwargs) -> None: + """Delete a prompt template by name. + + Args: + prompt_name: Name of the template to delete + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + prompt_client.delete_prompt("old_template") + ``` + """ + self._prompt_api.delete_message_template(name=prompt_name, **kwargs) + + def get_tags_for_prompt_template(self, prompt_name: str, **kwargs) -> List[Tag]: + """Get tags for a prompt template. 
- def delete_prompt(self, prompt_name: str): - self.promptApi.delete_message_template(prompt_name) + Args: + prompt_name: Name of the template + **kwargs: Additional optional parameters to pass to the API - def get_tags_for_prompt_template(self, prompt_name: str) -> List[MetadataTag]: - return self.promptApi.get_tags_for_prompt_template(prompt_name) + Returns: + List of Tag instances - def update_tag_for_prompt_template(self, prompt_name: str, tags: List[MetadataTag]): - self.promptApi.put_tag_for_prompt_template(tags, prompt_name) + Example: + ```python + tags = prompt_client.get_tags_for_prompt_template("customer_greeting") + for tag in tags: + print(f"Tag: {tag.key}={tag.value}") + ``` + """ + return self._prompt_api.get_tags_for_prompt_template(name=prompt_name, **kwargs) - def delete_tag_for_prompt_template(self, prompt_name: str, tags: List[MetadataTag]): - self.promptApi.delete_tag_for_prompt_template(tags, prompt_name) + def update_tag_for_prompt_template(self, prompt_name: str, tags: List[Tag], **kwargs) -> None: + """Update tags for a prompt template. + + Args: + prompt_name: Name of the template + tags: List of tags to set + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.client.http.models.tag import Tag + + tags = [ + Tag(key="category", value="customer_service"), + Tag(key="language", value="english") + ] + prompt_client.update_tag_for_prompt_template("customer_greeting", tags) + ``` + """ + self._prompt_api.put_tag_for_prompt_template(body=tags, name=prompt_name, **kwargs) + + def delete_tag_for_prompt_template(self, prompt_name: str, tags: List[Tag], **kwargs) -> None: + """Delete tags for a prompt template. + + Args: + prompt_name: Name of the template + tags: List of tags to delete + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.client.http.models.tag import Tag + + tags_to_delete = [Tag(key="category", value="old_category")] + prompt_client.delete_tag_for_prompt_template("customer_greeting", tags_to_delete) + ``` + """ + self._prompt_api.delete_tag_for_prompt_template(body=tags, name=prompt_name, **kwargs) def test_prompt( self, @@ -50,7 +188,36 @@ def test_prompt( temperature: float = 0.1, top_p: float = 0.9, stop_words: Optional[List[str]] = None, + **kwargs, ) -> str: + """Test a prompt template with AI model. 
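+
+        Variables in the prompt text are substituted with the supplied values before the
+        prompt is sent to the configured AI integration and model.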
+ + Args: + prompt_text: Prompt template text to test + variables: Dictionary of variable values for substitution + ai_integration: Name of the AI integration (e.g., "openai") + text_complete_model: Model name (e.g., "gpt-4", "gpt-3.5-turbo") + temperature: Sampling temperature (0.0 to 1.0), default 0.1 + top_p: Nucleus sampling parameter (0.0 to 1.0), default 0.9 + stop_words: Optional list of stop words + **kwargs: Additional optional parameters to pass to the API + + Returns: + Generated text from the AI model + + Example: + ```python + result = prompt_client.test_prompt( + prompt_text="Summarize this: ${text}", + variables={"text": "Long article content..."}, + ai_integration="openai", + text_complete_model="gpt-4", + temperature=0.7, + top_p=0.9 + ) + print(f"Generated: {result}") + ``` + """ request = PromptTemplateTestRequest() request.prompt = prompt_text request.llm_provider = ai_integration @@ -60,4 +227,35 @@ def test_prompt( request.top_p = top_p if stop_words is not None: request.stop_words = stop_words - return self.promptApi.test_message_template(request) + return self._prompt_api.test_message_template(request, **kwargs) + + def create_message_templates(self, body: List[MessageTemplate], **kwargs) -> None: + """Create multiple message templates in bulk. + + Args: + body: List of message templates to create + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.client.http.models.message_template import MessageTemplate + + templates = [ + MessageTemplate( + name="greeting", + description="Greeting template", + template="Hello ${name}!" + ), + MessageTemplate( + name="farewell", + description="Farewell template", + template="Goodbye ${name}, see you soon!" + ) + ] + prompt_client.create_message_templates(templates) + ``` + """ + self._prompt_api.create_message_templates(body=body, **kwargs) diff --git a/src/conductor/client/orkes/orkes_scheduler_client.py b/src/conductor/client/orkes/orkes_scheduler_client.py index 1178a7d9d..2c80cbd82 100644 --- a/src/conductor/client/orkes/orkes_scheduler_client.py +++ b/src/conductor/client/orkes/orkes_scheduler_client.py @@ -1,12 +1,17 @@ from __future__ import annotations -from typing import List, Optional, Tuple +from typing import List, Optional + +from deprecated import deprecated +from typing_extensions import deprecated as typing_deprecated from conductor.client.configuration.configuration import Configuration +from conductor.client.http.models import WorkflowScheduleModel from conductor.client.http.models.save_schedule_request import SaveScheduleRequest from conductor.client.http.models.search_result_workflow_schedule_execution_model import ( SearchResultWorkflowScheduleExecutionModel, ) +from conductor.client.http.models.tag import Tag from conductor.client.http.models.workflow_schedule import WorkflowSchedule from conductor.client.orkes.models.metadata_tag import MetadataTag from conductor.client.orkes.orkes_base_client import OrkesBaseClient @@ -15,20 +20,97 @@ class OrkesSchedulerClient(OrkesBaseClient, SchedulerClient): def __init__(self, configuration: Configuration): - super(OrkesSchedulerClient, self).__init__(configuration) + """Initialize the OrkesSchedulerClient with configuration. 
+ + Args: + configuration: Configuration object containing server settings and authentication + + Example: + ```python + from conductor.client.configuration.configuration import Configuration + + config = Configuration(server_api_url="http://localhost:8080/api") + scheduler_client = OrkesSchedulerClient(config) + ``` + """ + super().__init__(configuration) + + def save_schedule(self, save_schedule_request: SaveScheduleRequest, **kwargs) -> None: + """Create or update a workflow schedule. + + Args: + save_schedule_request: Schedule configuration including cron expression and workflow details + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.client.http.models.save_schedule_request import SaveScheduleRequest + from conductor.client.http.models.start_workflow_request import StartWorkflowRequest + + # Create a daily schedule at 9 AM + start_request = StartWorkflowRequest( + name="daily_report", + version=1, + input={"report_type": "daily"} + ) + + schedule_request = SaveScheduleRequest( + name="daily_report_schedule", + cron_expression="0 9 * * *", + start_workflow_request=start_request, + paused=False + ) + + scheduler_client.save_schedule(schedule_request) + ``` + """ + self._scheduler_api.save_schedule(body=save_schedule_request, **kwargs) + + def get_schedule(self, name: str, **kwargs) -> WorkflowSchedule: + """Get a workflow schedule by name. + + Args: + name: Name of the schedule to retrieve + **kwargs: Additional optional parameters to pass to the API + + Returns: + WorkflowSchedule instance + + Example: + ```python + schedule = scheduler_client.get_schedule("daily_report_schedule") + print(f"Schedule: {schedule.name}, Cron: {schedule.cron_expression}") + ``` + """ + return self._scheduler_api.get_schedule(name=name, **kwargs) + + def get_all_schedules( + self, workflow_name: Optional[str] = None, **kwargs + ) -> List[WorkflowScheduleModel]: + """Get all workflow schedules, optionally filtered by workflow name. + + Args: + workflow_name: Optional workflow name to filter schedules + **kwargs: Additional optional parameters to pass to the API - def save_schedule(self, save_schedule_request: SaveScheduleRequest): - self.schedulerResourceApi.save_schedule(save_schedule_request) + Returns: + List of WorkflowScheduleModel instances - def get_schedule(self, name: str) -> Tuple[Optional[WorkflowSchedule], str]: - return self.schedulerResourceApi.get_schedule(name) + Example: + ```python + # Get all schedules + all_schedules = scheduler_client.get_all_schedules() - def get_all_schedules(self, workflow_name: Optional[str] = None) -> List[WorkflowSchedule]: - kwargs = {} + # Get schedules for a specific workflow + report_schedules = scheduler_client.get_all_schedules("daily_report") + ``` + """ if workflow_name: kwargs.update({"workflow_name": workflow_name}) - - return self.schedulerResourceApi.get_all_schedules(**kwargs) + return self._scheduler_api.get_all_schedules(**kwargs) def get_next_few_schedule_execution_times( self, @@ -36,30 +118,126 @@ def get_next_few_schedule_execution_times( schedule_start_time: Optional[int] = None, schedule_end_time: Optional[int] = None, limit: Optional[int] = None, + **kwargs, ) -> List[int]: - kwargs = {} + """Get the next few execution times for a cron expression. 
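+
+        Start and end times, as well as the returned execution times, are epoch
+        timestamps in milliseconds.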
+ + Args: + cron_expression: Cron expression to evaluate + schedule_start_time: Optional start time (epoch milliseconds) + schedule_end_time: Optional end time (epoch milliseconds) + limit: Optional maximum number of execution times to return + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of execution times as epoch milliseconds + + Example: + ```python + # Get next 5 executions for daily at 9 AM + times = scheduler_client.get_next_few_schedule_execution_times( + cron_expression="0 9 * * *", + limit=5 + ) + + for time in times: + from datetime import datetime + dt = datetime.fromtimestamp(time / 1000) + print(f"Next execution: {dt}") + ``` + """ if schedule_start_time: kwargs.update({"schedule_start_time": schedule_start_time}) if schedule_end_time: kwargs.update({"schedule_end_time": schedule_end_time}) if limit: kwargs.update({"limit": limit}) - return self.schedulerResourceApi.get_next_few_schedules(cron_expression, **kwargs) + return self._scheduler_api.get_next_few_schedules(cron_expression=cron_expression, **kwargs) + + def delete_schedule(self, name: str, **kwargs) -> None: + """Delete a workflow schedule. + + Args: + name: Name of the schedule to delete + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + scheduler_client.delete_schedule("old_schedule") + ``` + """ + self._scheduler_api.delete_schedule(name=name, **kwargs) + + def pause_schedule(self, name: str, **kwargs) -> None: + """Pause a workflow schedule. + + Args: + name: Name of the schedule to pause + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + scheduler_client.pause_schedule("daily_report_schedule") + ``` + """ + self._scheduler_api.pause_schedule(name=name, **kwargs) + + def pause_all_schedules(self, **kwargs) -> None: + """Pause all workflow schedules. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + # Pause all schedules (e.g., during maintenance) + scheduler_client.pause_all_schedules() + ``` + """ + self._scheduler_api.pause_all_schedules(**kwargs) - def delete_schedule(self, name: str): - self.schedulerResourceApi.delete_schedule(name) + def resume_schedule(self, name: str, **kwargs) -> None: + """Resume a paused workflow schedule. - def pause_schedule(self, name: str): - self.schedulerResourceApi.pause_schedule(name) + Args: + name: Name of the schedule to resume + **kwargs: Additional optional parameters to pass to the API - def pause_all_schedules(self): - self.schedulerResourceApi.pause_all_schedules() + Returns: + None - def resume_schedule(self, name: str): - self.schedulerResourceApi.resume_schedule(name) + Example: + ```python + scheduler_client.resume_schedule("daily_report_schedule") + ``` + """ + self._scheduler_api.resume_schedule(name=name, **kwargs) - def resume_all_schedules(self): - self.schedulerResourceApi.resume_all_schedules() + def resume_all_schedules(self, **kwargs) -> None: + """Resume all paused workflow schedules. 
+ + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + # Resume all schedules (e.g., after maintenance) + scheduler_client.resume_all_schedules() + ``` + """ + self._scheduler_api.resume_all_schedules(**kwargs) def search_schedule_executions( self, @@ -68,8 +246,35 @@ def search_schedule_executions( sort: Optional[str] = None, free_text: Optional[str] = None, query: Optional[str] = None, + **kwargs, ) -> SearchResultWorkflowScheduleExecutionModel: - kwargs = {} + """Search workflow schedule executions. + + Args: + start: Start index for pagination + size: Number of results to return + sort: Sort order (e.g., "startTime:DESC") + free_text: Free text search query + query: Structured query string + **kwargs: Additional optional parameters to pass to the API + + Returns: + SearchResultWorkflowScheduleExecutionModel with execution results + + Example: + ```python + # Search recent executions + results = scheduler_client.search_schedule_executions( + start=0, + size=20, + sort="startTime:DESC" + ) + + print(f"Total executions: {results.total_hits}") + for execution in results.results: + print(f"Execution: {execution.workflow_id}, Status: {execution.state}") + ``` + """ if start: kwargs.update({"start": start}) if size: @@ -80,16 +285,143 @@ def search_schedule_executions( kwargs.update({"free_text": free_text}) if query: kwargs.update({"query": query}) - return self.schedulerResourceApi.search_v2(**kwargs) + return self._scheduler_api.search_v2(**kwargs) + + def requeue_all_execution_records(self, **kwargs) -> None: + """Requeue all pending schedule execution records. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + # Requeue failed executions + scheduler_client.requeue_all_execution_records() + ``` + """ + self._scheduler_api.requeue_all_execution_records(**kwargs) + + @deprecated("set_scheduler_tags is deprecated; use set_scheduler_tags_validated instead") + @typing_deprecated("set_scheduler_tags is deprecated; use set_scheduler_tags_validated instead") + def set_scheduler_tags(self, tags: List[MetadataTag], name: str, **kwargs) -> None: + """Set tags for a schedule. + + .. deprecated:: + Use set_scheduler_tags_validated() instead. + + Args: + tags: List of tags to set + name: Name of the schedule + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + """ + self._scheduler_api.put_tag_for_schedule(tags, name, **kwargs) + + def set_scheduler_tags_validated(self, tags: List[Tag], name: str, **kwargs) -> None: + """Set tags for a schedule. + + Args: + tags: List of tags to set + name: Name of the schedule + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.client.http.models.tag import Tag + + tags = [ + Tag(key="environment", value="production"), + Tag(key="frequency", value="daily") + ] + scheduler_client.set_scheduler_tags_validated(tags, "daily_report_schedule") + ``` + """ + self._scheduler_api.put_tag_for_schedule(body=tags, name=name, **kwargs) + + def get_scheduler_tags(self, name: str, **kwargs) -> List[Tag]: + """Get tags for a schedule. 
+ + Args: + name: Name of the schedule + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of Tag instances + + Example: + ```python + tags = scheduler_client.get_scheduler_tags("daily_report_schedule") + for tag in tags: + print(f"Tag: {tag.key}={tag.value}") + ``` + """ + return self._scheduler_api.get_tags_for_schedule(name=name, **kwargs) + + @deprecated("delete_scheduler_tags is deprecated; use delete_scheduler_tags_validated instead") + @typing_deprecated( + "delete_scheduler_tags is deprecated; use delete_scheduler_tags_validated instead" + ) + def delete_scheduler_tags(self, tags: List[MetadataTag], name: str, **kwargs) -> None: + """Delete tags for a schedule. + + .. deprecated:: + Use delete_scheduler_tags_validated() instead. + + Args: + tags: List of tags to delete + name: Name of the schedule + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + """ + return self._scheduler_api.delete_tag_for_schedule(tags, name) + + def delete_scheduler_tags_validated(self, tags: List[Tag], name: str, **kwargs) -> None: + """Delete tags for a schedule. + + Args: + tags: List of tags to delete + name: Name of the schedule + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.client.http.models.tag import Tag + + tags_to_delete = [Tag(key="environment", value="staging")] + scheduler_client.delete_scheduler_tags_validated(tags_to_delete, "daily_report_schedule") + ``` + """ + self._scheduler_api.delete_tag_for_schedule(body=tags, name=name, **kwargs) - def requeue_all_execution_records(self): - self.schedulerResourceApi.requeue_all_execution_records() + def get_schedules_by_tag(self, tag: str, **kwargs) -> List[WorkflowScheduleModel]: + """Get all schedules with a specific tag. - def set_scheduler_tags(self, tags: List[MetadataTag], name: str): - self.schedulerResourceApi.put_tag_for_schedule(tags, name) + Args: + tag: Tag to filter by (format: "key:value") + **kwargs: Additional optional parameters to pass to the API - def get_scheduler_tags(self, name: str) -> List[MetadataTag]: - return self.schedulerResourceApi.get_tags_for_schedule(name) + Returns: + List of WorkflowScheduleModel instances - def delete_scheduler_tags(self, tags: List[MetadataTag], name: str) -> List[MetadataTag]: - return self.schedulerResourceApi.delete_tag_for_schedule(tags, name) + Example: + ```python + # Get all production schedules + prod_schedules = scheduler_client.get_schedules_by_tag("environment:production") + for schedule in prod_schedules: + print(f"Schedule: {schedule.name}") + ``` + """ + return self._scheduler_api.get_schedules_by_tag(tag=tag, **kwargs) diff --git a/src/conductor/client/orkes/orkes_schema_client.py b/src/conductor/client/orkes/orkes_schema_client.py index 32a91cf86..d4a8709bb 100644 --- a/src/conductor/client/orkes/orkes_schema_client.py +++ b/src/conductor/client/orkes/orkes_schema_client.py @@ -1,5 +1,8 @@ from typing import List +from deprecated import deprecated +from typing_extensions import deprecated as typing_deprecated + from conductor.client.configuration.configuration import Configuration from conductor.client.http.models.schema_def import SchemaDef from conductor.client.orkes.orkes_base_client import OrkesBaseClient @@ -8,19 +11,144 @@ class OrkesSchemaClient(OrkesBaseClient, SchemaClient): def __init__(self, configuration: Configuration): - super(OrkesSchemaClient, self).__init__(configuration) + """Initialize the OrkesSchemaClient with configuration. 
+ + Args: + configuration: Configuration object containing server settings and authentication + + Example: + ```python + from conductor.client.configuration.configuration import Configuration + + config = Configuration(server_api_url="http://localhost:8080/api") + schema_client = OrkesSchemaClient(config) + ``` + """ + super().__init__(configuration) + + def register_schema(self, schema: SchemaDef, **kwargs) -> None: + """Register a schema definition. + + Args: + schema: Schema definition to register + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.client.http.models.schema_def import SchemaDef + + schema = SchemaDef( + name="order_schema", + version=1, + type="JSON", + schema={ + "type": "object", + "properties": { + "order_id": {"type": "string"}, + "total": {"type": "number"} + } + } + ) + schema_client.register_schema(schema) + ``` + """ + self._schema_api.save(body=schema, **kwargs) + + def get_schema(self, schema_name: str, version: int, **kwargs) -> SchemaDef: + """Get a schema definition by name and version. + + Args: + schema_name: Name of the schema + version: Version number of the schema + **kwargs: Additional optional parameters to pass to the API + + Returns: + SchemaDef instance + + Example: + ```python + schema = schema_client.get_schema("order_schema", 1) + print(f"Schema: {schema.name}, Type: {schema.type}") + ``` + """ + return self._schema_api.get_schema_by_name_and_version( + name=schema_name, version=version, **kwargs + ) + + def get_all_schemas(self, **kwargs) -> List[SchemaDef]: + """Get all registered schemas. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of SchemaDef instances + + Example: + ```python + schemas = schema_client.get_all_schemas() + for schema in schemas: + print(f"Schema: {schema.name}, Version: {schema.version}") + ``` + """ + return self._schema_api.get_all_schemas(**kwargs) + + @deprecated("delete_schema is deprecated; use delete_schema_by_name_and_version instead") + @typing_deprecated("delete_schema is deprecated; use delete_schema_by_name_and_version instead") + def delete_schema(self, schema_name: str, version: int, **kwargs) -> None: + """Delete a schema by name and version. + + .. deprecated:: + Use delete_schema_by_name_and_version() instead. + + Args: + schema_name: Name of the schema to delete + version: Version number of the schema + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + """ + self._schema_api.delete_schema_by_name_and_version( + name=schema_name, version=version, **kwargs + ) + + def delete_schema_by_name_and_version(self, schema_name: str, version: int, **kwargs) -> None: + """Delete a schema by name and version. + + Args: + schema_name: Name of the schema to delete + version: Version number of the schema + **kwargs: Additional optional parameters to pass to the API + + Returns: + None - def register_schema(self, schema: SchemaDef) -> None: - self.schemaApi.save(schema) + Example: + ```python + schema_client.delete_schema_by_name_and_version("old_schema", 1) + ``` + """ + self._schema_api.delete_schema_by_name_and_version( + name=schema_name, version=version, **kwargs + ) - def get_schema(self, schema_name: str, version: int) -> SchemaDef: - return self.schemaApi.get_schema_by_name_and_version(name=schema_name, version=version) + def delete_schema_by_name(self, schema_name: str, **kwargs) -> None: + """Delete all versions of a schema by name. 
- def get_all_schemas(self) -> List[SchemaDef]: - return self.schemaApi.get_all_schemas() + Args: + schema_name: Name of the schema to delete (all versions) + **kwargs: Additional optional parameters to pass to the API - def delete_schema(self, schema_name: str, version: int) -> None: - self.schemaApi.delete_schema_by_name_and_version(name=schema_name, version=version) + Returns: + None - def delete_schema_by_name(self, schema_name: str) -> None: - self.schemaApi.delete_schema_by_name(name=schema_name) + Example: + ```python + schema_client.delete_schema_by_name("deprecated_schema") + ``` + """ + self._schema_api.delete_schema_by_name(name=schema_name, **kwargs) diff --git a/src/conductor/client/orkes/orkes_secret_client.py b/src/conductor/client/orkes/orkes_secret_client.py index 70e3d7a23..89e3327ee 100644 --- a/src/conductor/client/orkes/orkes_secret_client.py +++ b/src/conductor/client/orkes/orkes_secret_client.py @@ -1,38 +1,269 @@ from typing import List, Set +from deprecated import deprecated +from typing_extensions import deprecated as typing_deprecated + from conductor.client.configuration.configuration import Configuration -from conductor.client.orkes.models.metadata_tag import MetadataTag +from conductor.client.http.models.extended_secret import ExtendedSecret +from conductor.client.http.models.tag import Tag from conductor.client.orkes.orkes_base_client import OrkesBaseClient from conductor.client.secret_client import SecretClient class OrkesSecretClient(OrkesBaseClient, SecretClient): def __init__(self, configuration: Configuration): - super(OrkesSecretClient, self).__init__(configuration) + """Initialize the OrkesSecretClient with configuration. + + Args: + configuration: Configuration object containing server settings and authentication + + Example: + ```python + from conductor.client.configuration.configuration import Configuration + + config = Configuration(server_api_url="http://localhost:8080/api") + secret_client = OrkesSecretClient(config) + ``` + """ + super().__init__(configuration) + + def put_secret(self, key: str, value: str, **kwargs) -> None: + """Store a secret value by key. + + Args: + key: Unique key for the secret + value: Secret value to store + **kwargs: Additional optional parameters to pass to the API + + Returns: + None - def put_secret(self, key: str, value: str): - self.secretResourceApi.put_secret(value, key) + Example: + ```python + # Store database credentials + secret_client.put_secret("db_password", "mysecretpassword123") - def get_secret(self, key: str) -> str: - return self.secretResourceApi.get_secret(key) + # Store API keys + secret_client.put_secret("openai_api_key", "sk-...") + ``` + """ + self._secret_api.put_secret(body=value, key=key, **kwargs) + def get_secret(self, key: str, **kwargs) -> str: + """Get a secret value by key. + + Args: + key: Unique key for the secret + **kwargs: Additional optional parameters to pass to the API + + Returns: + The secret value + + Example: + ```python + password = secret_client.get_secret("db_password") + # Use password in workflow + ``` + """ + return self._secret_api.get_secret(key=key, **kwargs) + + @deprecated("list_all_secret_names is deprecated; use list_all_secret_names_validated instead") + @typing_deprecated( + "list_all_secret_names is deprecated; use list_all_secret_names_validated instead" + ) def list_all_secret_names(self) -> Set[str]: - return set(self.secretResourceApi.list_all_secret_names()) + """List all secret names. + + .. deprecated:: + Use list_all_secret_names_validated() instead. 
+ + Returns: + Set of secret names + """ + return set(self._secret_api.list_all_secret_names()) + + def list_all_secret_names_validated(self, **kwargs) -> List[str]: + """List all secret names. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of secret names + + Example: + ```python + names = secret_client.list_all_secret_names_validated() + for name in names: + print(f"Secret: {name}") + ``` + """ + return self._secret_api.list_all_secret_names(**kwargs) + + def list_secrets_that_user_can_grant_access_to(self, **kwargs) -> List[str]: + """List secrets that the current user can grant access to. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of secret names + + Example: + ```python + grantable_secrets = secret_client.list_secrets_that_user_can_grant_access_to() + print(f"Can grant access to {len(grantable_secrets)} secrets") + ``` + """ + return self._secret_api.list_secrets_that_user_can_grant_access_to(**kwargs) + + def delete_secret(self, key: str, **kwargs) -> None: + """Delete a secret by key. + + Args: + key: Unique key for the secret to delete + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + secret_client.delete_secret("old_api_key") + ``` + """ + self._secret_api.delete_secret(key=key, **kwargs) + + def secret_exists(self, key: str, **kwargs) -> object: # type: ignore[override] + """Check if a secret exists by key. + + Args: + key: Unique key for the secret + **kwargs: Additional optional parameters to pass to the API + + Returns: + Object indicating if secret exists + + Example: + ```python + if secret_client.secret_exists("db_password"): + print("Secret exists") + ``` + """ + return self._secret_api.secret_exists(key=key, **kwargs) + + def set_secret_tags(self, tags: List[Tag], key: str, **kwargs) -> None: + """Set tags for a secret. + + Args: + tags: List of tags to set + key: Unique key for the secret + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.client.http.models.tag import Tag + + tags = [ + Tag(key="environment", value="production"), + Tag(key="type", value="database") + ] + secret_client.set_secret_tags(tags, "db_password") + ``` + """ + self._secret_api.put_tag_for_secret(body=tags, key=key, **kwargs) + + def get_secret_tags(self, key: str, **kwargs) -> List[Tag]: + """Get tags for a secret. + + Args: + key: Unique key for the secret + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of Tag instances + + Example: + ```python + tags = secret_client.get_secret_tags("db_password") + for tag in tags: + print(f"Tag: {tag.key}={tag.value}") + ``` + """ + return self._secret_api.get_tags(key=key, **kwargs) + + def delete_secret_tags(self, tags: List[Tag], key: str, **kwargs) -> None: + """Delete tags for a secret. + + Args: + tags: List of tags to delete + key: Unique key for the secret + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.client.http.models.tag import Tag + + tags_to_delete = [Tag(key="environment", value="staging")] + secret_client.delete_secret_tags(tags_to_delete, "db_password") + ``` + """ + self._secret_api.delete_tag_for_secret(body=tags, key=key, **kwargs) + + def clear_local_cache(self, **kwargs) -> None: + """Clear the local secret cache. 
+ + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + secret_client.clear_local_cache() + ``` + """ + self._secret_api.clear_local_cache(**kwargs) + + def clear_redis_cache(self, **kwargs) -> None: + """Clear the Redis secret cache. + + Args: + **kwargs: Additional optional parameters to pass to the API - def list_secrets_that_user_can_grant_access_to(self) -> List[str]: - return self.secretResourceApi.list_secrets_that_user_can_grant_access_to() + Returns: + None - def delete_secret(self, key: str): - self.secretResourceApi.delete_secret(key) + Example: + ```python + secret_client.clear_redis_cache() + ``` + """ + self._secret_api.clear_redis_cache(**kwargs) - def secret_exists(self, key: str) -> bool: - return self.secretResourceApi.secret_exists(key) + def list_secrets_with_tags_that_user_can_grant_access_to( + self, **kwargs + ) -> List[ExtendedSecret]: + """List secrets with tags that the current user can grant access to. - def set_secret_tags(self, tags: List[MetadataTag], key: str): - self.secretResourceApi.put_tag_for_secret(tags, key) + Args: + **kwargs: Additional optional parameters to pass to the API - def get_secret_tags(self, key: str) -> List[MetadataTag]: - return self.secretResourceApi.get_tags(key) + Returns: + List of ExtendedSecret instances with tag information - def delete_secret_tags(self, tags: List[MetadataTag], key: str) -> List[MetadataTag]: - return self.secretResourceApi.delete_tag_for_secret(tags, key) + Example: + ```python + secrets = secret_client.list_secrets_with_tags_that_user_can_grant_access_to() + for secret in secrets: + print(f"Secret: {secret.key}, Tags: {secret.tags}") + ``` + """ + return self._secret_api.list_secrets_with_tags_that_user_can_grant_access_to(**kwargs) diff --git a/src/conductor/client/orkes/orkes_service_registry_client.py b/src/conductor/client/orkes/orkes_service_registry_client.py index 697d1201c..06fd69c00 100644 --- a/src/conductor/client/orkes/orkes_service_registry_client.py +++ b/src/conductor/client/orkes/orkes_service_registry_client.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import List, Optional +from typing import Dict, List, Optional from conductor.client.configuration.configuration import Configuration from conductor.client.http.models.circuit_breaker_transition_response import ( @@ -15,63 +15,361 @@ class OrkesServiceRegistryClient(OrkesBaseClient, ServiceRegistryClient): def __init__(self, configuration: Configuration): - super(OrkesServiceRegistryClient, self).__init__(configuration) + """Initialize the OrkesServiceRegistryClient with configuration. 
- def get_registered_services(self) -> List[ServiceRegistry]: - return self.serviceRegistryResourceApi.get_registered_services() + Args: + configuration: Configuration object containing server settings and authentication - def get_service(self, name: str) -> ServiceRegistry: - return self.serviceRegistryResourceApi.get_service(name) + Example: + ```python + from conductor.client.configuration.configuration import Configuration - def add_or_update_service(self, service_registry: ServiceRegistry) -> None: - self.serviceRegistryResourceApi.add_or_update_service(service_registry) + config = Configuration(server_api_url="http://localhost:8080/api") + service_registry_client = OrkesServiceRegistryClient(config) + ``` + """ + super().__init__(configuration) - def remove_service(self, name: str) -> None: - self.serviceRegistryResourceApi.remove_service(name) + def get_registered_services(self, **kwargs) -> List[ServiceRegistry]: + """Get all registered services. - def open_circuit_breaker(self, name: str) -> CircuitBreakerTransitionResponse: - return self.serviceRegistryResourceApi.open_circuit_breaker(name) + Args: + **kwargs: Additional optional parameters to pass to the API - def close_circuit_breaker(self, name: str) -> CircuitBreakerTransitionResponse: - return self.serviceRegistryResourceApi.close_circuit_breaker(name) + Returns: + List of ServiceRegistry instances - def get_circuit_breaker_status(self, name: str) -> CircuitBreakerTransitionResponse: - return self.serviceRegistryResourceApi.get_circuit_breaker_status(name) + Example: + ```python + services = service_registry_client.get_registered_services() + for service in services: + print(f"Service: {service.name}, Host: {service.host}") + ``` + """ + return self._service_registry_api.get_registered_services(**kwargs) - def add_or_update_method(self, registry_name: str, method: ServiceMethod) -> None: - self.serviceRegistryResourceApi.add_or_update_method(registry_name, method) + def get_service(self, name: str, **kwargs) -> ServiceRegistry: + """Get a specific service by name. + + Args: + name: Name of the service to retrieve + **kwargs: Additional optional parameters to pass to the API + + Returns: + ServiceRegistry instance + + Example: + ```python + service = service_registry_client.get_service("payment-service") + print(f"Service: {service.name}, Port: {service.port}") + ``` + """ + return self._service_registry_api.get_service(name=name, **kwargs) + + def add_or_update_service(self, service_registry: ServiceRegistry, **kwargs) -> None: + """Add or update a service in the registry. + + Args: + service_registry: Service configuration to register + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.client.http.models.service_registry import ServiceRegistry + + service = ServiceRegistry( + name="payment-service", + host="payment.example.com", + port=8080, + protocol="https" + ) + service_registry_client.add_or_update_service(service) + ``` + """ + self._service_registry_api.add_or_update_service(body=service_registry, **kwargs) + + def remove_service(self, name: str, **kwargs) -> None: + """Remove a service from the registry. 
+ + Args: + name: Name of the service to remove + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + service_registry_client.remove_service("old-payment-service") + ``` + """ + self._service_registry_api.remove_service(name=name, **kwargs) + + def open_circuit_breaker(self, name: str, **kwargs) -> CircuitBreakerTransitionResponse: + """Open the circuit breaker for a service. + + Args: + name: Name of the service + **kwargs: Additional optional parameters to pass to the API + + Returns: + CircuitBreakerTransitionResponse with the transition details + + Example: + ```python + response = service_registry_client.open_circuit_breaker("payment-service") + print(f"Circuit breaker state: {response.current_state}") + ``` + """ + return self._service_registry_api.open_circuit_breaker(name=name, **kwargs) + + def close_circuit_breaker(self, name: str, **kwargs) -> CircuitBreakerTransitionResponse: + """Close the circuit breaker for a service. + + Args: + name: Name of the service + **kwargs: Additional optional parameters to pass to the API + + Returns: + CircuitBreakerTransitionResponse with the transition details + + Example: + ```python + response = service_registry_client.close_circuit_breaker("payment-service") + print(f"Circuit breaker state: {response.current_state}") + ``` + """ + return self._service_registry_api.close_circuit_breaker(name=name, **kwargs) + + def get_circuit_breaker_status(self, name: str, **kwargs) -> CircuitBreakerTransitionResponse: + """Get the circuit breaker status for a service. + + Args: + name: Name of the service + **kwargs: Additional optional parameters to pass to the API + + Returns: + CircuitBreakerTransitionResponse with the current status + + Example: + ```python + status = service_registry_client.get_circuit_breaker_status("payment-service") + print(f"Circuit breaker state: {status.current_state}") + ``` + """ + return self._service_registry_api.get_circuit_breaker_status(name=name, **kwargs) + + def add_or_update_method(self, registry_name: str, method: ServiceMethod, **kwargs) -> None: + """Add or update a method for a registered service. + + Args: + registry_name: Name of the service registry + method: Service method configuration + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.client.http.models.service_method import ServiceMethod + + method = ServiceMethod( + name="processPayment", + service_name="payment-service", + method_type="POST" + ) + service_registry_client.add_or_update_method("payment-registry", method) + ``` + """ + self._service_registry_api.add_or_update_method( + registry_name=registry_name, body=method, **kwargs + ) def remove_method( - self, registry_name: str, service_name: str, method: str, method_type: str + self, registry_name: str, service_name: str, method: str, method_type: str, **kwargs ) -> None: - self.serviceRegistryResourceApi.remove_method( - registry_name, service_name, method, method_type + """Remove a method from a registered service. 
+ + Args: + registry_name: Name of the service registry + service_name: Name of the service + method: Name of the method to remove + method_type: Type of the method (e.g., "POST", "GET") + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + service_registry_client.remove_method( + "payment-registry", + "payment-service", + "processPayment", + "POST" + ) + ``` + """ + self._service_registry_api.remove_method( + registry_name=registry_name, + service_name=service_name, + method=method, + method_type=method_type, + **kwargs, ) - def get_proto_data(self, registry_name: str, filename: str) -> bytes: - return self.serviceRegistryResourceApi.get_proto_data(registry_name, filename) + def get_proto_data(self, registry_name: str, filename: str, **kwargs) -> bytes: + """Get Protocol Buffer data for a service. + + Args: + registry_name: Name of the service registry + filename: Name of the proto file + **kwargs: Additional optional parameters to pass to the API + + Returns: + Proto data as bytes + + Example: + ```python + proto_data = service_registry_client.get_proto_data( + "payment-registry", + "payment.proto" + ) + ``` + """ + return self._service_registry_api.get_proto_data( + registry_name=registry_name, filename=filename, **kwargs + ) - def set_proto_data(self, registry_name: str, filename: str, data: bytes) -> None: - self.serviceRegistryResourceApi.set_proto_data(registry_name, filename, data) + def set_proto_data(self, registry_name: str, filename: str, data: bytes, **kwargs) -> None: + """Set Protocol Buffer data for a service. - def delete_proto(self, registry_name: str, filename: str) -> None: - self.serviceRegistryResourceApi.delete_proto(registry_name, filename) + Args: + registry_name: Name of the service registry + filename: Name of the proto file + data: Proto data as bytes + **kwargs: Additional optional parameters to pass to the API - def get_all_protos(self, registry_name: str) -> List[ProtoRegistryEntry]: - return self.serviceRegistryResourceApi.get_all_protos(registry_name) + Returns: + None - def discover(self, name: str, create: Optional[bool] = False) -> List[ServiceMethod]: - kwargs = {} + Example: + ```python + with open("payment.proto", "rb") as f: + proto_data = f.read() + + service_registry_client.set_proto_data( + "payment-registry", + "payment.proto", + proto_data + ) + ``` + """ + self._service_registry_api.set_proto_data( + registry_name=registry_name, filename=filename, data=data, **kwargs + ) + + def delete_proto(self, registry_name: str, filename: str, **kwargs) -> None: + """Delete Protocol Buffer data for a service. + + Args: + registry_name: Name of the service registry + filename: Name of the proto file to delete + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + service_registry_client.delete_proto("payment-registry", "old_payment.proto") + ``` + """ + self._service_registry_api.delete_proto( + registry_name=registry_name, filename=filename, **kwargs + ) + + def get_all_protos(self, registry_name: str, **kwargs) -> List[ProtoRegistryEntry]: + """Get all Protocol Buffer entries for a registry. 
+ + Args: + registry_name: Name of the service registry + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of ProtoRegistryEntry instances + + Example: + ```python + protos = service_registry_client.get_all_protos("payment-registry") + for proto in protos: + print(f"Proto: {proto.filename}, Size: {len(proto.data)} bytes") + ``` + """ + return self._service_registry_api.get_all_protos(registry_name=registry_name, **kwargs) + + def discover(self, name: str, create: Optional[bool] = False, **kwargs) -> List[ServiceMethod]: + """Discover methods for a service. + + Args: + name: Name of the service to discover + create: If True, create the service if it doesn't exist + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of ServiceMethod instances + + Example: + ```python + methods = service_registry_client.discover("payment-service", create=True) + for method in methods: + print(f"Method: {method.name}, Type: {method.method_type}") + ``` + """ if create: kwargs.update({"create": create}) - return self.serviceRegistryResourceApi.discover(name, **kwargs) + return self._service_registry_api.discover(name=name, **kwargs) # Additional convenience methods can be added here if needed - def get_queue_sizes_for_all_tasks(self) -> dict: - """Get queue sizes for all task types""" - return self.taskResourceApi.all() + def get_queue_sizes_for_all_tasks(self, **kwargs) -> Dict[str, int]: + """Get queue sizes for all task types. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + Dictionary mapping task types to queue sizes + + Example: + ```python + queue_sizes = service_registry_client.get_queue_sizes_for_all_tasks() + for task_type, size in queue_sizes.items(): + print(f"Task: {task_type}, Queue Size: {size}") + ``` + """ + return self._task_api.all(**kwargs) + + def is_circuit_breaker_open(self, name: str, **kwargs) -> bool: + """Check if circuit breaker is open for a service. 
+
+        Args:
+            name: Name of the service
+            **kwargs: Additional optional parameters to pass to the API
+
+        Returns:
+            True if circuit breaker is open, False otherwise
-    def is_circuit_breaker_open(self, name: str) -> bool:
-        """Check if circuit breaker is open for a service"""
-        status = self.get_circuit_breaker_status(name)
+        Example:
+            ```python
+            if service_registry_client.is_circuit_breaker_open("payment-service"):
+                print("Circuit breaker is open - service is unavailable")
+            else:
+                print("Circuit breaker is closed - service is available")
+            ```
+        """
+        status = self._service_registry_api.get_circuit_breaker_status(name=name, **kwargs)
         return bool(status.current_state and status.current_state.upper() == "OPEN")
diff --git a/src/conductor/client/orkes/orkes_task_client.py b/src/conductor/client/orkes/orkes_task_client.py
index a1245b0c9..7f65d0f3d 100644
--- a/src/conductor/client/orkes/orkes_task_client.py
+++ b/src/conductor/client/orkes/orkes_task_client.py
@@ -1,9 +1,12 @@
 from __future__ import annotations
 
-from typing import List, Optional
+from typing import Dict, List, Optional
 
 from conductor.client.configuration.configuration import Configuration
 from conductor.client.http.models.poll_data import PollData
+from conductor.client.http.models.search_result_task import SearchResultTask
+from conductor.client.http.models.search_result_task_summary import SearchResultTaskSummary
+from conductor.client.http.models.signal_response import SignalResponse
 from conductor.client.http.models.task import Task
 from conductor.client.http.models.task_exec_log import TaskExecLog
 from conductor.client.http.models.task_result import TaskResult
@@ -14,18 +17,53 @@ class OrkesTaskClient(OrkesBaseClient, TaskClient):
     def __init__(self, configuration: Configuration):
-        super(OrkesTaskClient, self).__init__(configuration)
+        """Initialize the OrkesTaskClient with configuration.
+
+        Args:
+            configuration: Configuration object containing server settings and authentication
+
+        Example:
+            ```python
+            from conductor.client.configuration.configuration import Configuration
+
+            config = Configuration(server_api_url="http://localhost:8080/api")
+            task_client = OrkesTaskClient(config)
+            ```
+        """
+        super().__init__(configuration)
 
     def poll_task(
-        self, task_type: str, worker_id: Optional[str] = None, domain: Optional[str] = None
-    ) -> Optional[Task]:
-        kwargs = {}
+        self,
+        task_type: str,
+        worker_id: Optional[str] = None,
+        domain: Optional[str] = None,
+        **kwargs,
+    ) -> Optional[Task]:
+        """Poll for a single task of a certain type.
+
+        Args:
+            task_type: Type of task to poll for
+            worker_id: Optional worker ID for tracking
+            domain: Optional domain for task isolation
+            **kwargs: Additional optional parameters to pass to the API
+
+        Returns:
+            Task instance if a task is available, otherwise None
+
+        Example:
+            ```python
+            task = task_client.poll_task("process_order", worker_id="worker-1")
+            if task:
+                print(f"Got task: {task.task_id}")
+                # Process the task
+            ```
+        """
         if worker_id:
             kwargs.update({"workerid": worker_id})
         if domain:
             kwargs.update({"domain": domain})
-        return self.taskResourceApi.poll(task_type, **kwargs)
+        return self._task_api.poll(tasktype=task_type, **kwargs)
 
     def batch_poll_tasks(
         self,
@@ -34,8 +72,32 @@
         count: Optional[int] = None,
         timeout_in_millisecond: Optional[int] = None,
         domain: Optional[str] = None,
+        **kwargs,
     ) -> List[Task]:
-        kwargs = {}
+        """Poll for multiple tasks of a certain type.
+ + Args: + task_type: Type of task to poll for + worker_id: Optional worker ID for tracking + count: Maximum number of tasks to poll + timeout_in_millisecond: Timeout for the poll operation in milliseconds + domain: Optional domain for task isolation + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of Task instances + + Example: + ```python + tasks = task_client.batch_poll_tasks( + "process_order", + worker_id="worker-1", + count=10, + timeout_in_millisecond=5000 + ) + print(f"Got {len(tasks)} tasks") + ``` + """ if worker_id: kwargs.update({"workerid": worker_id}) if count: @@ -44,14 +106,50 @@ def batch_poll_tasks( kwargs.update({"timeout": timeout_in_millisecond}) if domain: kwargs.update({"domain": domain}) + return self._task_api.batch_poll(tasktype=task_type, **kwargs) + + def get_task(self, task_id: str, **kwargs) -> Task: + """Get a task by ID. + + Args: + task_id: Unique identifier for the task + **kwargs: Additional optional parameters to pass to the API + + Returns: + Task instance + + Example: + ```python + task = task_client.get_task("task-123") + print(f"Task status: {task.status}") + ``` + """ + return self._task_api.get_task(task_id=task_id, **kwargs) + + def update_task(self, task_result: TaskResult, **kwargs) -> str: + """Update a task with result. + + Args: + task_result: Task result containing status and output + **kwargs: Additional optional parameters to pass to the API - return self.taskResourceApi.batch_poll(task_type, **kwargs) + Returns: + Workflow ID as string - def get_task(self, task_id: str) -> Task: - return self.taskResourceApi.get_task(task_id) + Example: + ```python + from conductor.client.http.models.task_result import TaskResult - def update_task(self, task_result: TaskResult) -> str: - return self.taskResourceApi.update_task(task_result) + result = TaskResult( + task_id="task-123", + status="COMPLETED", + output_data={"result": "success"} + ) + workflow_id = task_client.update_task(result) + print(f"Updated task in workflow: {workflow_id}") + ``` + """ + return self._task_api.update_task(body=task_result, **kwargs) def update_task_by_ref_name( self, @@ -60,12 +158,37 @@ def update_task_by_ref_name( status: str, output: object, worker_id: Optional[str] = None, + **kwargs, ) -> str: + """Update a task by reference name within a workflow. + + Args: + workflow_id: ID of the workflow containing the task + task_ref_name: Reference name of the task in the workflow + status: New status for the task (e.g., "COMPLETED", "FAILED") + output: Output data for the task + worker_id: Optional worker ID for tracking + **kwargs: Additional optional parameters to pass to the API + + Returns: + Workflow ID as string + + Example: + ```python + workflow_id = task_client.update_task_by_ref_name( + workflow_id="workflow-123", + task_ref_name="process_order", + status="COMPLETED", + output={"order_id": "12345", "status": "processed"} + ) + ``` + """ body = {"result": output} - kwargs = {} if worker_id: kwargs.update({"workerid": worker_id}) - return self.taskResourceApi.update_task1(body, workflow_id, task_ref_name, status, **kwargs) + return self._task_api.update_task1( + body=body, workflow_id=workflow_id, task_ref_name=task_ref_name, status=status, **kwargs + ) def update_task_sync( self, @@ -74,27 +197,318 @@ def update_task_sync( status: str, output: object, worker_id: Optional[str] = None, + **kwargs, ) -> Workflow: + """Update a task synchronously and get the updated workflow. 
+ + Args: + workflow_id: ID of the workflow containing the task + task_ref_name: Reference name of the task in the workflow + status: New status for the task + output: Output data for the task (dict or other object) + worker_id: Optional worker ID for tracking + **kwargs: Additional optional parameters to pass to the API + + Returns: + Workflow instance with updated state + + Example: + ```python + workflow = task_client.update_task_sync( + workflow_id="workflow-123", + task_ref_name="process_order", + status="COMPLETED", + output={"order_id": "12345"} + ) + print(f"Workflow status: {workflow.status}") + ``` + """ if not isinstance(output, dict): output = {"result": output} body = output - kwargs = {} if worker_id: kwargs.update({"workerid": worker_id}) - return self.taskResourceApi.update_task_sync( - body, workflow_id, task_ref_name, status, **kwargs + return self._task_api.update_task_sync( + body=body, workflow_id=workflow_id, task_ref_name=task_ref_name, status=status, **kwargs ) - def get_queue_size_for_task(self, task_type: str) -> int: - queueSizesByTaskType = self.taskResourceApi.size(task_type=[task_type]) + def get_queue_size_for_task(self, task_type: str, **kwargs) -> int: + """Get the queue size for a specific task type. + + Args: + task_type: Type of task to check + **kwargs: Additional optional parameters to pass to the API + + Returns: + Queue size as integer + + Example: + ```python + size = task_client.get_queue_size_for_task("process_order") + print(f"Queue size: {size}") + ``` + """ + queueSizesByTaskType = self._task_api.size(task_type=[task_type], **kwargs) queueSize = queueSizesByTaskType.get(task_type, 0) return queueSize - def add_task_log(self, task_id: str, log_message: str): - self.taskResourceApi.log(body=log_message, task_id=task_id) + def add_task_log(self, task_id: str, log_message: str, **kwargs) -> None: + """Add a log message to a task. + + Args: + task_id: Unique identifier for the task + log_message: Log message to add + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + task_client.add_task_log("task-123", "Processing started") + ``` + """ + self._task_api.log(body=log_message, task_id=task_id, **kwargs) + + def get_task_logs(self, task_id: str, **kwargs) -> List[TaskExecLog]: + """Get all log messages for a task. + + Args: + task_id: Unique identifier for the task + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of TaskExecLog instances + + Example: + ```python + logs = task_client.get_task_logs("task-123") + for log in logs: + print(f"{log.created_time}: {log.log}") + ``` + """ + return self._task_api.get_task_logs(task_id=task_id, **kwargs) + + def get_task_poll_data(self, task_type: str, **kwargs) -> List[PollData]: + """Get poll data for a specific task type. + + Args: + task_type: Type of task to get poll data for + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of PollData instances + + Example: + ```python + poll_data = task_client.get_task_poll_data("process_order") + for data in poll_data: + print(f"Worker: {data.worker_id}, Last poll: {data.last_poll_time}") + ``` + """ + return self._task_api.get_poll_data(task_type=task_type, **kwargs) + + def get_all_poll_data(self, **kwargs) -> Dict[str, object]: + """Get poll data for all task types. 
+
+        Args:
+            **kwargs: Additional optional parameters to pass to the API
+
+        Returns:
+            Dictionary mapping task types to poll data
+
+        Example:
+            ```python
+            all_poll_data = task_client.get_all_poll_data()
+            for task_type, data in all_poll_data.items():
+                print(f"Task type: {task_type}")
+            ```
+        """
+        return self._task_api.get_all_poll_data(**kwargs)
+
+    def requeue_pending_task(self, task_type: str, **kwargs) -> str:
+        """Requeue all pending tasks of a certain type.
+
+        Args:
+            task_type: Type of task to requeue
+            **kwargs: Additional optional parameters to pass to the API
+
+        Returns:
+            Result message as string
+
+        Example:
+            ```python
+            result = task_client.requeue_pending_task("process_order")
+            print(f"Requeue result: {result}")
+            ```
+        """
+        return self._task_api.requeue_pending_task(task_type=task_type, **kwargs)
+
+    def search_tasks(
+        self,
+        start: int = 0,
+        size: int = 100,
+        sort: Optional[str] = None,
+        free_text: Optional[str] = None,
+        query: Optional[str] = None,
+        **kwargs,
+    ) -> SearchResultTaskSummary:
+        """Search for tasks based on payload and other parameters.
+
+        Args:
+            start: Start index for pagination
+            size: Page size
+            sort: Sort options as sort=<field>:ASC|DESC e.g. sort=name&sort=workflowId:DESC
+            free_text: Free text search
+            query: Query string
+            **kwargs: Additional optional parameters to pass to the API
+
+        Returns:
+            SearchResultTaskSummary with matching tasks
+
+        Example:
+            ```python
+            results = task_client.search_tasks(
+                start=0,
+                size=20,
+                sort="startTime:DESC",
+                query="taskType='process_order'"
+            )
+            print(f"Found {results.total_hits} tasks")
+            ```
+        """
+        return self._task_api.search1(
+            start=start, size=size, sort=sort, free_text=free_text, query=query, **kwargs
+        )
+
+    def search_tasks_v2(
+        self,
+        start: int = 0,
+        size: int = 100,
+        sort: Optional[str] = None,
+        free_text: Optional[str] = None,
+        query: Optional[str] = None,
+        **kwargs,
+    ) -> SearchResultTask:
+        """Search for tasks based on payload and other parameters (v2 API).
+
+        Args:
+            start: Start index for pagination
+            size: Page size
+            sort: Sort options as sort=<field>:ASC|DESC e.g. sort=name&sort=workflowId:DESC
+            free_text: Free text search
+            query: Query string
+            **kwargs: Additional optional parameters to pass to the API
+
+        Returns:
+            SearchResultTask with matching tasks
+
+        Example:
+            ```python
+            results = task_client.search_tasks_v2(
+                start=0,
+                size=20,
+                sort="startTime:DESC",
+                free_text="order"
+            )
+            print(f"Found {results.total_hits} tasks")
+            for task in results.results:
+                print(f"Task: {task.task_id}, Status: {task.status}")
+            ```
+        """
+        return self._task_api.search_v21(
+            start=start, size=size, sort=sort, free_text=free_text, query=query, **kwargs
+        )
+
+    def signal_workflow_task_async(
+        self, workflow_id: str, status: str, body: Dict[str, object], **kwargs
+    ) -> None:
+        """Signal a workflow task asynchronously.
+
+        Args:
+            workflow_id: ID of the workflow containing the task
+            status: Status to signal
+            body: Signal payload
+            **kwargs: Additional optional parameters to pass to the API
+
+        Returns:
+            None
+
+        Example:
+            ```python
+            task_client.signal_workflow_task_async(
+                workflow_id="workflow-123",
+                status="COMPLETED",
+                body={"result": "success"}
+            )
+            ```
+        """
+        self._task_api.signal_workflow_task_async(
+            workflow_id=workflow_id, status=status, body=body, **kwargs
+        )
+
+    def signal_workflow_task_sync(
+        self, workflow_id: str, status: str, body: Dict[str, object], **kwargs
+    ) -> SignalResponse:
+        """Signal a workflow task synchronously.
+ + Args: + workflow_id: ID of the workflow containing the task + status: Status to signal + body: Signal payload + **kwargs: Additional optional parameters to pass to the API + + Returns: + SignalResponse with signal result + + Example: + ```python + response = task_client.signal_workflow_task_sync( + workflow_id="workflow-123", + status="COMPLETED", + body={"result": "success"} + ) + print(f"Signal response: {response.status}") + ``` + """ + return self._task_api.signal_workflow_task_sync( + workflow_id=workflow_id, status=status, body=body, **kwargs + ) + + def get_task_queue_sizes(self, **kwargs) -> Dict[str, int]: + """Get the size of all task queues. + + Args: + **kwargs: Additional optional parameters to pass to the API + + Returns: + Dictionary mapping task types to queue sizes + + Example: + ```python + queue_sizes = task_client.get_task_queue_sizes() + for task_type, size in queue_sizes.items(): + print(f"Task: {task_type}, Queue Size: {size}") + ``` + """ + return self._task_api.all(**kwargs) + + def get_task_queue_sizes_verbose(self, **kwargs) -> Dict[str, Dict[str, Dict[str, int]]]: + """Get detailed information about all task queues. + + Args: + **kwargs: Additional optional parameters to pass to the API - def get_task_logs(self, task_id: str) -> List[TaskExecLog]: - return self.taskResourceApi.get_task_logs(task_id) + Returns: + Nested dictionary with detailed queue information - def get_task_poll_data(self, task_type: str) -> List[PollData]: - return self.taskResourceApi.get_poll_data(task_type=task_type) + Example: + ```python + queue_info = task_client.get_task_queue_sizes_verbose() + for task_type, details in queue_info.items(): + print(f"Task type: {task_type}") + for domain, stats in details.items(): + print(f" Domain: {domain}, Stats: {stats}") + ``` + """ + return self._task_api.all_verbose(**kwargs) diff --git a/src/conductor/client/orkes/orkes_workflow_client.py b/src/conductor/client/orkes/orkes_workflow_client.py index aa08003e3..94085968a 100644 --- a/src/conductor/client/orkes/orkes_workflow_client.py +++ b/src/conductor/client/orkes/orkes_workflow_client.py @@ -1,7 +1,10 @@ from __future__ import annotations import uuid -from typing import Dict, List, Optional +from typing import Any, Dict, List, Optional + +from deprecated import deprecated +from typing_extensions import deprecated as typing_deprecated from conductor.client.configuration.configuration import Configuration from conductor.client.http.models.correlation_ids_search_request import CorrelationIdsSearchRequest @@ -9,9 +12,10 @@ from conductor.client.http.models.scrollable_search_result_workflow_summary import ( ScrollableSearchResultWorkflowSummary, ) -from conductor.client.http.models.signal_response import SignalResponse from conductor.client.http.models.skip_task_request import SkipTaskRequest from conductor.client.http.models.start_workflow_request import StartWorkflowRequest +from conductor.client.http.models.task_list_search_result_summary import TaskListSearchResultSummary +from conductor.client.http.models.upgrade_workflow_request import UpgradeWorkflowRequest from conductor.client.http.models.workflow import Workflow from conductor.client.http.models.workflow_run import WorkflowRun from conductor.client.http.models.workflow_state_update import WorkflowStateUpdate @@ -23,8 +27,27 @@ class OrkesWorkflowClient(OrkesBaseClient, WorkflowClient): def __init__(self, configuration: Configuration): - super(OrkesWorkflowClient, self).__init__(configuration) + """Initialize the OrkesWorkflowClient 
with configuration. + + Args: + configuration: Configuration object containing server settings and authentication + Example: + ```python + from conductor.client.configuration.configuration import Configuration + + config = Configuration(server_api_url="http://localhost:8080/api") + workflow_client = OrkesWorkflowClient(config) + ``` + """ + super().__init__(configuration) + + @deprecated( + "start_workflow_by_name is deprecated; use start_workflow_by_name_validated instead" + ) + @typing_deprecated( + "start_workflow_by_name is deprecated; use start_workflow_by_name_validated instead" + ) def start_workflow_by_name( self, name: str, @@ -32,8 +55,24 @@ def start_workflow_by_name( version: Optional[int] = None, correlationId: Optional[str] = None, priority: Optional[int] = None, + **kwargs, ) -> str: - kwargs = {} + """Start a workflow by name with input data. + + .. deprecated:: + Use start_workflow_by_name_validated() instead. + + Args: + name: Name of the workflow to start + input: Input data for the workflow as dictionary + version: Optional workflow version + correlationId: Optional correlation ID + priority: Optional priority level + **kwargs: Additional optional parameters to pass to the API + + Returns: + Workflow ID as string + """ if version: kwargs.update({"version": version}) if correlationId: @@ -41,10 +80,83 @@ def start_workflow_by_name( if priority: kwargs.update({"priority": priority}) - return self.workflowResourceApi.start_workflow1(input, name, **kwargs) + return self._workflow_api.start_workflow1(input, name, **kwargs) - def start_workflow(self, start_workflow_request: StartWorkflowRequest) -> str: - return self.workflowResourceApi.start_workflow(start_workflow_request) + def start_workflow_by_name_validated( + self, + name: str, + input: Dict[str, object], + version: Optional[int] = None, + correlation_id: Optional[str] = None, + priority: Optional[int] = None, + **kwargs, + ) -> str: + """Start a workflow by name with input data. + + Args: + name: Name of the workflow to start + input: Input data for the workflow as dictionary + version: Optional workflow version. If None, uses latest version + correlation_id: Optional correlation ID for tracking related workflows + priority: Optional priority level (0-99, higher is more priority) + **kwargs: Additional optional parameters to pass to the API + + Returns: + Workflow ID as string + + Example: + ```python + # Start a simple workflow + workflow_id = workflow_client.start_workflow_by_name_validated( + "order_processing", + {"order_id": "12345", "customer_id": "cust-999"} + ) + print(f"Started workflow: {workflow_id}") + + # Start with priority and correlation + workflow_id = workflow_client.start_workflow_by_name_validated( + "urgent_order_processing", + {"order_id": "99999"}, + version=2, + priority=10, + correlation_id="batch-2024-01" + ) + ``` + """ + if version: + kwargs.update({"version": version}) + if correlation_id: + kwargs.update({"correlation_id": correlation_id}) + if priority: + kwargs.update({"priority": priority}) + + return self._workflow_api.start_workflow1(body=input, name=name, **kwargs) + + def start_workflow(self, start_workflow_request: StartWorkflowRequest, **kwargs) -> str: + """Start a workflow using a StartWorkflowRequest object. 
+ + Args: + start_workflow_request: Workflow start request with all parameters + **kwargs: Additional optional parameters to pass to the API + + Returns: + Workflow ID as string + + Example: + ```python + from conductor.client.http.models.start_workflow_request import StartWorkflowRequest + + request = StartWorkflowRequest( + name="order_processing", + version=1, + input={"order_id": "12345"}, + correlation_id="batch-001", + priority=5 + ) + workflow_id = workflow_client.start_workflow(request) + ``` + """ + return self._workflow_api.start_workflow(body=start_workflow_request, **kwargs) def execute_workflow( self, @@ -52,14 +164,47 @@ def execute_workflow( request_id: Optional[str] = None, wait_until_task_ref: Optional[str] = None, wait_for_seconds: int = 30, + **kwargs, ) -> WorkflowRun: - return self.workflowResourceApi.execute_workflow( + """Execute a workflow synchronously and wait for completion. + + Args: + start_workflow_request: Workflow start request + request_id: Optional request ID for tracking + wait_until_task_ref: Wait until this task reference is reached + wait_for_seconds: How long to wait for completion (default 30) + **kwargs: Additional optional parameters to pass to the API + + Returns: + WorkflowRun with execution result + + Example: + ```python + from conductor.client.http.models.start_workflow_request import StartWorkflowRequest + + request = StartWorkflowRequest( + name="order_processing", + version=1, + input={"order_id": "12345"} + ) + + result = workflow_client.execute_workflow( + request, + wait_for_seconds=60 + ) + + print(f"Workflow status: {result.status}") + print(f"Output: {result.output}") + ``` + """ + return self._workflow_api.execute_workflow( body=start_workflow_request, request_id=request_id, version=start_workflow_request.version, name=start_workflow_request.name, wait_until_task_ref=wait_until_task_ref, wait_for_seconds=wait_for_seconds, + **kwargs, ) def execute_workflow_with_return_strategy( @@ -70,8 +215,10 @@ def execute_workflow_with_return_strategy( wait_for_seconds: int = 30, consistency: Optional[str] = None, return_strategy: Optional[str] = None, - ) -> SignalResponse: - """Execute a workflow synchronously with optional reactive features + **kwargs, + ) -> WorkflowRun: + """Execute a workflow synchronously with optional reactive features. 
+ Args: start_workflow_request: StartWorkflowRequest containing workflow details request_id: Optional request ID for tracking @@ -79,15 +226,35 @@ def execute_workflow_with_return_strategy( wait_for_seconds: How long to wait for completion (default 30) consistency: Workflow consistency level - 'DURABLE' or 'SYNCHRONOUS' or 'REGION_DURABLE' return_strategy: Return strategy - 'TARGET_WORKFLOW' or 'BLOCKING_WORKFLOW' or 'BLOCKING_TASK' or 'BLOCKING_TASK_INPUT' + **kwargs: Additional optional parameters to pass to the API + Returns: - WorkflowRun: The workflow execution result + WorkflowRun with the workflow execution result + + Example: + ```python + from conductor.client.http.models.start_workflow_request import StartWorkflowRequest + + request = StartWorkflowRequest( + name="data_pipeline", + version=1, + input={"dataset": "customers"} + ) + + result = workflow_client.execute_workflow_with_return_strategy( + request, + wait_for_seconds=120, + consistency="SYNCHRONOUS", + return_strategy="TARGET_WORKFLOW" + ) + ``` """ if consistency is None: consistency = "DURABLE" if return_strategy is None: return_strategy = "TARGET_WORKFLOW" - return self.workflowResourceApi.execute_workflow_with_return_strategy( + return self._workflow_api.execute_workflow_with_return_strategy( body=start_workflow_request, name=start_workflow_request.name, version=start_workflow_request.version, @@ -96,69 +263,307 @@ def execute_workflow_with_return_strategy( wait_for_seconds=wait_for_seconds, consistency=consistency, return_strategy=return_strategy, + **kwargs, ) - def pause_workflow(self, workflow_id: str): - self.workflowResourceApi.pause_workflow(workflow_id) + def pause_workflow(self, workflow_id: str, **kwargs) -> None: + """Pause a running workflow. + + Args: + workflow_id: ID of the workflow to pause + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + workflow_client.pause_workflow("workflow-123") + ``` + """ + self._workflow_api.pause_workflow(workflow_id=workflow_id, **kwargs) + + def resume_workflow(self, workflow_id: str, **kwargs) -> None: + """Resume a paused workflow. + + Args: + workflow_id: ID of the workflow to resume + **kwargs: Additional optional parameters to pass to the API + + Returns: + None - def resume_workflow(self, workflow_id: str): - self.workflowResourceApi.resume_workflow(workflow_id) + Example: + ```python + workflow_client.resume_workflow("workflow-123") + ``` + """ + self._workflow_api.resume_workflow(workflow_id=workflow_id, **kwargs) + + def restart_workflow( + self, workflow_id: str, use_latest_def: Optional[bool] = False, **kwargs + ) -> None: + """Restart a workflow from the beginning. 
+ + Args: + workflow_id: ID of the workflow to restart + use_latest_def: If True, use latest workflow definition + **kwargs: Additional optional parameters to pass to the API + + Returns: + None - def restart_workflow(self, workflow_id: str, use_latest_def: Optional[bool] = False): - kwargs = {} + Example: + ```python + # Restart with current definition + workflow_client.restart_workflow("workflow-123") + + # Restart with latest definition + workflow_client.restart_workflow("workflow-123", use_latest_def=True) + ``` + """ if use_latest_def: - kwargs["use_latest_definitions"] = use_latest_def - self.workflowResourceApi.restart(workflow_id, **kwargs) + kwargs.update({"use_latest_definitions": use_latest_def}) + + self._workflow_api.restart(workflow_id=workflow_id, **kwargs) + + def rerun_workflow( + self, workflow_id: str, rerun_workflow_request: RerunWorkflowRequest, **kwargs + ) -> str: + """Rerun a workflow from a specific task. + + Args: + workflow_id: ID of the workflow to rerun + rerun_workflow_request: Configuration for the rerun + **kwargs: Additional optional parameters to pass to the API - def rerun_workflow(self, workflow_id: str, rerun_workflow_request: RerunWorkflowRequest) -> str: + Returns: + New workflow ID as string + + Example: + ```python + from conductor.client.http.models.rerun_workflow_request import RerunWorkflowRequest + + rerun_request = RerunWorkflowRequest( + re_run_from_task_id="task-456" + ) + + new_workflow_id = workflow_client.rerun_workflow("workflow-123", rerun_request) + print(f"Rerun workflow ID: {new_workflow_id}") + ``` + """ rerun_workflow_request.re_run_from_workflow_id = workflow_id - return self.workflowResourceApi.rerun(rerun_workflow_request, workflow_id) + return self._workflow_api.rerun( + body=rerun_workflow_request, workflow_id=workflow_id, **kwargs + ) + + def retry_workflow( + self, workflow_id: str, resume_subworkflow_tasks: Optional[bool] = False, **kwargs + ) -> None: + """Retry a failed workflow. + + Args: + workflow_id: ID of the workflow to retry + resume_subworkflow_tasks: If True, resume subworkflow tasks + **kwargs: Additional optional parameters to pass to the API + + Returns: + None - def retry_workflow(self, workflow_id: str, resume_subworkflow_tasks: Optional[bool] = False): - kwargs = {} + Example: + ```python + workflow_client.retry_workflow("workflow-123", resume_subworkflow_tasks=True) + ``` + """ if resume_subworkflow_tasks: - kwargs["resume_subworkflow_tasks"] = resume_subworkflow_tasks - self.workflowResourceApi.retry(workflow_id, **kwargs) + kwargs.update({"resume_subworkflow_tasks": resume_subworkflow_tasks}) + self._workflow_api.retry(workflow_id=workflow_id, **kwargs) def terminate_workflow( - self, workflow_id: str, reason: Optional[str] = None, trigger_failure_workflow: bool = False - ): - kwargs = {} + self, + workflow_id: str, + reason: Optional[str] = None, + trigger_failure_workflow: bool = False, + **kwargs, + ) -> None: + """Terminate a running workflow. 
+ + Args: + workflow_id: ID of the workflow to terminate + reason: Optional reason for termination + trigger_failure_workflow: If True, trigger the failure workflow + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + workflow_client.terminate_workflow( + "workflow-123", + reason="Cancelled by user", + trigger_failure_workflow=True + ) + ``` + """ if reason: - kwargs["reason"] = reason + kwargs.update({"reason": reason}) if trigger_failure_workflow: - kwargs["trigger_failure_workflow"] = trigger_failure_workflow - self.workflowResourceApi.terminate1(workflow_id, **kwargs) + kwargs.update({"trigger_failure_workflow": trigger_failure_workflow}) + self._workflow_api.terminate1(workflow_id=workflow_id, **kwargs) + + def get_workflow( + self, workflow_id: str, include_tasks: Optional[bool] = True, **kwargs + ) -> Workflow: + """Get workflow execution status and details. + + Args: + workflow_id: ID of the workflow + include_tasks: If True, include task details in the response + **kwargs: Additional optional parameters to pass to the API - def get_workflow(self, workflow_id: str, include_tasks: Optional[bool] = True) -> Workflow: - kwargs = {} + Returns: + Workflow instance with execution details + + Example: + ```python + workflow = workflow_client.get_workflow("workflow-123", include_tasks=True) + print(f"Status: {workflow.status}") + print(f"Tasks: {len(workflow.tasks)}") + ``` + """ if include_tasks: - kwargs["include_tasks"] = include_tasks - return self.workflowResourceApi.get_execution_status(workflow_id, **kwargs) + kwargs.update({"include_tasks": include_tasks}) + return self._workflow_api.get_execution_status(workflow_id=workflow_id, **kwargs) def get_workflow_status( self, workflow_id: str, include_output: Optional[bool] = None, include_variables: Optional[bool] = None, + **kwargs, ) -> WorkflowStatus: - kwargs = {} + """Get workflow status summary. + + Args: + workflow_id: ID of the workflow + include_output: If True, include workflow output in the response + include_variables: If True, include workflow variables in the response + **kwargs: Additional optional parameters to pass to the API + + Returns: + WorkflowStatus with status information + + Example: + ```python + status = workflow_client.get_workflow_status( + "workflow-123", + include_output=True, + include_variables=True + ) + print(f"Status: {status.status}") + print(f"Output: {status.output}") + ``` + """ if include_output is not None: - kwargs["include_output"] = include_output + kwargs.update({"include_output": include_output}) if include_variables is not None: - kwargs["include_variables"] = include_variables - return self.workflowResourceApi.get_workflow_status_summary(workflow_id, **kwargs) + kwargs.update({"include_variables": include_variables}) + return self._workflow_api.get_workflow_status_summary(workflow_id=workflow_id, **kwargs) + + def delete_workflow(self, workflow_id: str, archive_workflow: Optional[bool] = True, **kwargs): + """Delete a workflow from the system. 
+ + Args: + workflow_id: ID of the workflow to delete + archive_workflow: If True, archive instead of permanently deleting + **kwargs: Additional optional parameters to pass to the API - def delete_workflow(self, workflow_id: str, archive_workflow: Optional[bool] = True): - self.workflowResourceApi.delete1(workflow_id, archive_workflow=archive_workflow) + Returns: + None + + Example: + ```python + # Archive the workflow + workflow_client.delete_workflow("workflow-123", archive_workflow=True) + + # Permanently delete + workflow_client.delete_workflow("workflow-123", archive_workflow=False) + ``` + """ + self._workflow_api.delete1( + workflow_id=workflow_id, archive_workflow=archive_workflow, **kwargs + ) def skip_task_from_workflow( - self, workflow_id: str, task_reference_name: str, request: Optional[SkipTaskRequest] - ): - self.workflowResourceApi.skip_task_from_workflow(workflow_id, task_reference_name, request) + self, + workflow_id: str, + task_reference_name: str, + request: Optional[SkipTaskRequest], + **kwargs, + ) -> None: + """Skip a task in a running workflow. + + Args: + workflow_id: ID of the workflow + task_reference_name: Reference name of the task to skip + request: Optional skip task request with parameters + **kwargs: Additional optional parameters to pass to the API + + Returns: + None - def test_workflow(self, test_request: WorkflowTestRequest) -> Workflow: - return self.workflowResourceApi.test_workflow(test_request) + Example: + ```python + from conductor.client.http.models.skip_task_request import SkipTaskRequest + + skip_request = SkipTaskRequest( + task_input={"skipped": True}, + task_output={"result": "skipped"} + ) + + workflow_client.skip_task_from_workflow( + "workflow-123", + "send_email", + skip_request + ) + ``` + """ + self._workflow_api.skip_task_from_workflow( + workflow_id=workflow_id, task_reference_name=task_reference_name, body=request, **kwargs + ) + + def test_workflow(self, test_request: WorkflowTestRequest, **kwargs) -> Workflow: + """Test a workflow definition without persisting it. + + Args: + test_request: Workflow test request with definition and input + **kwargs: Additional optional parameters to pass to the API + + Returns: + Workflow instance with test execution result + + Example: + ```python + from conductor.client.http.models.workflow_test_request import WorkflowTestRequest + from conductor.client.http.models.workflow_def import WorkflowDef + + workflow_def = WorkflowDef( + name="test_workflow", + version=1, + tasks=[...] + ) + + test_request = WorkflowTestRequest( + workflow_def=workflow_def, + input={"test": "data"} + ) + + result = workflow_client.test_workflow(test_request) + print(f"Test result: {result.status}") + ``` + """ + return self._workflow_api.test_workflow(body=test_request, **kwargs) def search( self, @@ -168,33 +573,97 @@ def search( query: Optional[str] = None, query_id: Optional[str] = None, skip_cache: bool = False, + **kwargs, ) -> ScrollableSearchResultWorkflowSummary: - args = { - "start": start, - "size": size, - "free_text": free_text, - "query": query, - "skip_cache": skip_cache, - } - return self.workflowResourceApi.search(**args) + """Search for workflows. 
+ + Args: + start: Start index for pagination + size: Number of results to return + free_text: Free text search query + query: Structured query string + query_id: Optional query ID for cached searches + skip_cache: If True, skip cache and fetch fresh results + **kwargs: Additional optional parameters to pass to the API + + Returns: + ScrollableSearchResultWorkflowSummary with search results + + Example: + ```python + results = workflow_client.search( + start=0, + size=20, + free_text="order_processing", + query="status='FAILED'" + ) + + print(f"Total: {results.total_hits}") + for workflow in results.results: + print(f"Workflow: {workflow.workflow_id}, Status: {workflow.status}") + ``` + """ + if start is not None: + kwargs.update({"start": start}) + if size is not None: + kwargs.update({"size": size}) + if free_text is not None: + kwargs.update({"free_text": free_text}) + if query is not None: + kwargs.update({"query": query}) + if query_id is not None: + kwargs.update({"query_id": query_id}) + if skip_cache is not None: + kwargs.update({"skip_cache": skip_cache}) + + return self._workflow_api.search(**kwargs) def get_by_correlation_ids_in_batch( self, batch_request: CorrelationIdsSearchRequest, include_completed: bool = False, include_tasks: bool = False, + **kwargs, ) -> Dict[str, List[Workflow]]: - """Given the list of correlation ids and list of workflow names, find and return workflows - Returns a map with key as correlationId and value as a list of Workflows - When IncludeClosed is set to true, the return value also includes workflows that are completed otherwise only running workflows are returned""" - kwargs = {} + """Get workflows by correlation IDs in batch. + + Given the list of correlation ids and list of workflow names, find and return workflows. + Returns a map with key as correlationId and value as a list of Workflows. + When include_completed is set to true, the return value also includes workflows that are + completed otherwise only running workflows are returned. + + Args: + batch_request: Correlation IDs search request + include_completed: If True, include completed workflows + include_tasks: If True, include task details + **kwargs: Additional optional parameters to pass to the API - kwargs["body"] = batch_request + Returns: + Dictionary mapping correlation IDs to lists of Workflow instances + + Example: + ```python + from conductor.client.http.models.correlation_ids_search_request import CorrelationIdsSearchRequest + + batch_request = CorrelationIdsSearchRequest( + workflow_names=["order_processing"], + correlation_ids=["batch-001", "batch-002"] + ) + + workflows = workflow_client.get_by_correlation_ids_in_batch( + batch_request, + include_completed=True + ) + + for corr_id, wf_list in workflows.items(): + print(f"Correlation: {corr_id}, Workflows: {len(wf_list)}") + ``` + """ if include_tasks: - kwargs["include_tasks"] = include_tasks + kwargs.update({"include_tasks": include_tasks}) if include_completed: - kwargs["include_closed"] = include_completed - return self.workflowResourceApi.get_workflows1(**kwargs) + kwargs.update({"include_closed": include_completed}) + return self._workflow_api.get_workflows1(body=batch_request, **kwargs) def get_by_correlation_ids( self, @@ -202,26 +671,81 @@ def get_by_correlation_ids( correlation_ids: List[str], include_completed: bool = False, include_tasks: bool = False, + **kwargs, ) -> Dict[str, List[Workflow]]: - """Lists workflows for the given correlation id list""" - kwargs = {} + """Get workflows by correlation IDs. 
+ + Lists workflows for the given correlation id list. + + Args: + workflow_name: Name of the workflow + correlation_ids: List of correlation IDs to search for + include_completed: If True, include completed workflows + include_tasks: If True, include task details + **kwargs: Additional optional parameters to pass to the API + + Returns: + Dictionary mapping correlation IDs to lists of Workflow instances + + Example: + ```python + workflows = workflow_client.get_by_correlation_ids( + "order_processing", + ["batch-001", "batch-002"], + include_completed=True + ) + + for corr_id, wf_list in workflows.items(): + print(f"Correlation: {corr_id}, Count: {len(wf_list)}") + ``` + """ if include_tasks: - kwargs["include_tasks"] = include_tasks + kwargs.update({"include_tasks": include_tasks}) if include_completed: - kwargs["include_closed"] = include_completed + kwargs.update({"include_closed": include_completed}) - return self.workflowResourceApi.get_workflows( - body=correlation_ids, name=workflow_name, **kwargs - ) + return self._workflow_api.get_workflows(body=correlation_ids, name=workflow_name, **kwargs) + + def remove_workflow(self, workflow_id: str, **kwargs): + """Remove a workflow from the system. - def remove_workflow(self, workflow_id: str): - self.workflowResourceApi.delete1(workflow_id) + Args: + workflow_id: ID of the workflow to remove + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + workflow_client.remove_workflow("workflow-123") + ``` + """ + self._workflow_api.delete1(workflow_id=workflow_id, **kwargs) def update_variables( - self, workflow_id: str, variables: Optional[Dict[str, object]] = None + self, workflow_id: str, variables: Optional[Dict[str, object]] = None, **kwargs ) -> None: + """Update workflow variables. + + Args: + workflow_id: ID of the workflow + variables: Dictionary of variables to update + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + workflow_client.update_variables( + "workflow-123", + {"status": "in_progress", "count": 5} + ) + ``` + """ variables = variables or {} - self.workflowResourceApi.update_workflow_state(variables, workflow_id) + self._workflow_api.update_workflow_state(body=variables, workflow_id=workflow_id, **kwargs) def update_state( self, @@ -229,14 +753,239 @@ def update_state( update_request: WorkflowStateUpdate, wait_until_task_ref_names: Optional[List[str]] = None, wait_for_seconds: Optional[int] = None, + **kwargs, ) -> WorkflowRun: - kwargs = {} + """Update workflow and task state. 
+ + Args: + workflow_id: ID of the workflow + update_request: State update request + wait_until_task_ref_names: Wait until these task references are reached + wait_for_seconds: How long to wait for completion + **kwargs: Additional optional parameters to pass to the API + + Returns: + WorkflowRun with updated state + + Example: + ```python + from conductor.client.http.models.workflow_state_update import WorkflowStateUpdate + + update_request = WorkflowStateUpdate( + variables={"status": "updated"}, + task_ref_to_update_input={"task1": {"input": "new_value"}} + ) + + result = workflow_client.update_state( + "workflow-123", + update_request, + wait_for_seconds=30 + ) + ``` + """ request_id = str(uuid.uuid4()) if wait_until_task_ref_names is not None: - kwargs["wait_until_task_ref"] = ",".join(wait_until_task_ref_names) + kwargs.update({"wait_until_task_ref": ",".join(wait_until_task_ref_names)}) if wait_for_seconds is not None: - kwargs["wait_for_seconds"] = wait_for_seconds + kwargs.update({"wait_for_seconds": wait_for_seconds}) - return self.workflowResourceApi.update_workflow_and_task_state( + return self._workflow_api.update_workflow_and_task_state( body=update_request, workflow_id=workflow_id, request_id=request_id, **kwargs ) + + def decide(self, workflow_id: str, **kwargs) -> None: + """Trigger workflow decision making. + + Args: + workflow_id: ID of the workflow + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + workflow_client.decide("workflow-123") + ``` + """ + self._workflow_api.decide(workflow_id=workflow_id, **kwargs) + + def execute_workflow_as_api( + self, body: Dict[str, object], name: str, **kwargs + ) -> Dict[str, Any]: + """Execute a workflow as an API call. + + Args: + body: Input data for the workflow + name: Name of the workflow + **kwargs: Additional optional parameters to pass to the API + + Returns: + Dictionary with execution result + + Example: + ```python + result = workflow_client.execute_workflow_as_api( + {"order_id": "12345"}, + "order_processing" + ) + print(f"Result: {result}") + ``` + """ + return self._workflow_api.execute_workflow_as_api(body=body, name=name, **kwargs) + + def execute_workflow_as_get_api(self, name: str, **kwargs) -> Dict[str, Any]: + """Execute a workflow as a GET API call. + + Args: + name: Name of the workflow + **kwargs: Additional optional parameters to pass to the API + + Returns: + Dictionary with execution result + + Example: + ```python + result = workflow_client.execute_workflow_as_get_api("status_check") + print(f"Result: {result}") + ``` + """ + return self._workflow_api.execute_workflow_as_get_api(name=name, **kwargs) + + def get_execution_status_task_list( + self, workflow_id: str, **kwargs + ) -> TaskListSearchResultSummary: + """Get task list for a workflow execution. + + Args: + workflow_id: ID of the workflow + **kwargs: Additional optional parameters to pass to the API + + Returns: + TaskListSearchResultSummary with task list + + Example: + ```python + tasks = workflow_client.get_execution_status_task_list("workflow-123") + print(f"Total tasks: {tasks.total_hits}") + ``` + """ + return self._workflow_api.get_execution_status_task_list(workflow_id=workflow_id, **kwargs) + + def get_running_workflow(self, name: str, **kwargs) -> List[str]: + """Get running workflow IDs for a workflow name. 
+ + Args: + name: Name of the workflow + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of running workflow IDs + + Example: + ```python + running = workflow_client.get_running_workflow("order_processing") + print(f"{len(running)} workflows currently running") + ``` + """ + return self._workflow_api.get_running_workflow(name=name, **kwargs) + + def get_workflows_by_correlation_id( + self, name: str, correlation_id: str, **kwargs + ) -> List[Workflow]: + """Get workflows by correlation ID. + + Args: + name: Name of the workflow + correlation_id: Correlation ID to search for + **kwargs: Additional optional parameters to pass to the API + + Returns: + List of Workflow instances + + Example: + ```python + workflows = workflow_client.get_workflows_by_correlation_id( + "order_processing", + "batch-001" + ) + + for workflow in workflows: + print(f"Workflow: {workflow.workflow_id}, Status: {workflow.status}") + ``` + """ + return self._workflow_api.get_workflows2(name=name, correlation_id=correlation_id, **kwargs) + + def jump_to_task( + self, body: Dict[str, object], workflow_id: str, task_reference_name: str, **kwargs + ) -> None: + """Jump to a specific task in a workflow. + + Args: + body: Input data for the task + workflow_id: ID of the workflow + task_reference_name: Reference name of the task to jump to + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + workflow_client.jump_to_task( + {"skip_validation": True}, + "workflow-123", + "process_payment" + ) + ``` + """ + if task_reference_name is not None: + kwargs.update({"task_reference_name": task_reference_name}) + + return self._workflow_api.jump_to_task(body=body, workflow_id=workflow_id, **kwargs) + + def reset_workflow(self, workflow_id: str, **kwargs) -> None: + """Reset a workflow to initial state. + + Args: + workflow_id: ID of the workflow to reset + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + workflow_client.reset_workflow("workflow-123") + ``` + """ + self._workflow_api.reset_workflow(workflow_id=workflow_id, **kwargs) + + def upgrade_running_workflow_to_version( + self, body: UpgradeWorkflowRequest, workflow_id: str, **kwargs + ) -> None: + """Upgrade a running workflow to a new version. 
+ + Args: + body: Upgrade workflow request + workflow_id: ID of the workflow to upgrade + **kwargs: Additional optional parameters to pass to the API + + Returns: + None + + Example: + ```python + from conductor.client.http.models.upgrade_workflow_request import UpgradeWorkflowRequest + + upgrade_request = UpgradeWorkflowRequest( + version=2, + name="order_processing" + ) + + workflow_client.upgrade_running_workflow_to_version(upgrade_request, "workflow-123") + ``` + """ + return self._workflow_api.upgrade_running_workflow_to_version( + body=body, workflow_id=workflow_id, **kwargs + ) diff --git a/src/conductor/client/orkes_clients.py b/src/conductor/client/orkes_clients.py index 58e20b778..9029831f9 100644 --- a/src/conductor/client/orkes_clients.py +++ b/src/conductor/client/orkes_clients.py @@ -7,6 +7,7 @@ from conductor.client.integration_client import IntegrationClient from conductor.client.metadata_client import MetadataClient from conductor.client.orkes.orkes_authorization_client import OrkesAuthorizationClient +from conductor.client.orkes.orkes_event_client import OrkesEventClient from conductor.client.orkes.orkes_integration_client import OrkesIntegrationClient from conductor.client.orkes.orkes_metadata_client import OrkesMetadataClient from conductor.client.orkes.orkes_prompt_client import OrkesPromptClient @@ -15,7 +16,6 @@ from conductor.client.orkes.orkes_secret_client import OrkesSecretClient from conductor.client.orkes.orkes_task_client import OrkesTaskClient from conductor.client.orkes.orkes_workflow_client import OrkesWorkflowClient -from conductor.client.orkes.orkes_event_client import OrkesEventClient from conductor.client.prompt_client import PromptClient from conductor.client.scheduler_client import SchedulerClient from conductor.client.schema_client import SchemaClient diff --git a/src/conductor/client/prompt_client.py b/src/conductor/client/prompt_client.py index 0226600d9..f6932a46f 100644 --- a/src/conductor/client/prompt_client.py +++ b/src/conductor/client/prompt_client.py @@ -4,8 +4,8 @@ from typing import List, Optional # python 2 and python 3 compatibility library -from conductor.client.http.models.prompt_template import PromptTemplate -from conductor.client.orkes.models.metadata_tag import MetadataTag +from conductor.client.http.models.message_template import MessageTemplate +from conductor.client.http.models.tag import Tag class PromptClient(ABC): @@ -14,7 +14,7 @@ def save_prompt(self, prompt_name: str, description: str, prompt_template: str): pass @abstractmethod - def get_prompt(self, prompt_name: str) -> PromptTemplate: + def get_prompt(self, prompt_name: str, **kwargs) -> Optional[MessageTemplate]: pass @abstractmethod @@ -26,15 +26,15 @@ def delete_prompt(self, prompt_name: str): pass @abstractmethod - def get_tags_for_prompt_template(self, prompt_name: str) -> List[MetadataTag]: + def get_tags_for_prompt_template(self, prompt_name: str, **kwargs) -> List[Tag]: pass @abstractmethod - def update_tag_for_prompt_template(self, prompt_name: str, tags: List[MetadataTag]): + def update_tag_for_prompt_template(self, prompt_name: str, tags: List[Tag], **kwargs) -> None: pass @abstractmethod - def delete_tag_for_prompt_template(self, prompt_name: str, tags: List[MetadataTag]): + def delete_tag_for_prompt_template(self, prompt_name: str, tags: List[Tag]): pass @abstractmethod diff --git a/src/conductor/client/scheduler_client.py b/src/conductor/client/scheduler_client.py index 5b0cf558b..1ad4853b5 100644 --- a/src/conductor/client/scheduler_client.py +++ 
b/src/conductor/client/scheduler_client.py @@ -1,13 +1,15 @@ from __future__ import annotations from abc import ABC, abstractmethod -from typing import List, Optional, Tuple +from typing import List, Optional from conductor.client.http.models.save_schedule_request import SaveScheduleRequest from conductor.client.http.models.search_result_workflow_schedule_execution_model import ( SearchResultWorkflowScheduleExecutionModel, ) +from conductor.client.http.models.tag import Tag from conductor.client.http.models.workflow_schedule import WorkflowSchedule +from conductor.client.http.models.workflow_schedule_model import WorkflowScheduleModel from conductor.client.orkes.models.metadata_tag import MetadataTag @@ -17,11 +19,13 @@ def save_schedule(self, save_schedule_request: SaveScheduleRequest): pass @abstractmethod - def get_schedule(self, name: str) -> Tuple[Optional[WorkflowSchedule], str]: + def get_schedule(self, name: str, **kwargs) -> WorkflowSchedule: pass @abstractmethod - def get_all_schedules(self, workflow_name: Optional[str] = None) -> List[WorkflowSchedule]: + def get_all_schedules( + self, workflow_name: Optional[str] = None, **kwargs + ) -> List[WorkflowScheduleModel]: pass @abstractmethod @@ -74,9 +78,9 @@ def set_scheduler_tags(self, tags: List[MetadataTag], name: str): pass @abstractmethod - def get_scheduler_tags(self, name: str) -> List[MetadataTag]: + def get_scheduler_tags(self, name: str, **kwargs) -> List[Tag]: pass @abstractmethod - def delete_scheduler_tags(self, tags: List[MetadataTag], name: str) -> List[MetadataTag]: + def delete_scheduler_tags(self, tags: List[MetadataTag], name: str, **kwargs) -> None: pass diff --git a/src/conductor/client/secret_client.py b/src/conductor/client/secret_client.py index 0ca3fac76..4c013cac6 100644 --- a/src/conductor/client/secret_client.py +++ b/src/conductor/client/secret_client.py @@ -1,7 +1,7 @@ from abc import ABC, abstractmethod from typing import List, Set -from conductor.client.orkes.models.metadata_tag import MetadataTag +from conductor.client.http.models.tag import Tag class SecretClient(ABC): @@ -26,17 +26,17 @@ def delete_secret(self, key: str): pass @abstractmethod - def secret_exists(self, key: str) -> bool: + def secret_exists(self, key: str, **kwargs) -> bool: pass @abstractmethod - def set_secret_tags(self, tags: List[MetadataTag], key: str): + def set_secret_tags(self, tags: List[Tag], key: str) -> None: pass @abstractmethod - def get_secret_tags(self, key: str) -> List[MetadataTag]: + def get_secret_tags(self, key: str) -> List[Tag]: pass @abstractmethod - def delete_secret_tags(self, tags: List[MetadataTag], key: str) -> List[MetadataTag]: + def delete_secret_tags(self, tags: List[Tag], key: str) -> None: pass diff --git a/src/conductor/client/worker/worker.py b/src/conductor/client/worker/worker.py index 083fb214a..ee0536cfd 100644 --- a/src/conductor/client/worker/worker.py +++ b/src/conductor/client/worker/worker.py @@ -88,7 +88,7 @@ def execute(self, task: Task) -> TaskResult: task_input[input_name] = default_value else: task_input[input_name] = None - task_output = self.execute_function(**task_input) + task_output = self.execute_function(**task_input) # type: ignore[call-arg] if isinstance(task_output, TaskResult): task_output.task_id = task.task_id diff --git a/src/conductor/client/workflow/executor/workflow_executor.py b/src/conductor/client/workflow/executor/workflow_executor.py index 3c7028e8e..0a8c17ac6 100644 --- a/src/conductor/client/workflow/executor/workflow_executor.py +++ 
b/src/conductor/client/workflow/executor/workflow_executor.py @@ -7,7 +7,9 @@ from conductor.client.http.api.metadata_resource_api import MetadataResourceApi from conductor.client.http.api.task_resource_api import TaskResourceApi from conductor.client.http.api_client import ApiClient +from conductor.client.http.models import Task from conductor.client.http.models.correlation_ids_search_request import CorrelationIdsSearchRequest +from conductor.client.http.models.extended_workflow_def import ExtendedWorkflowDef from conductor.client.http.models.rerun_workflow_request import RerunWorkflowRequest from conductor.client.http.models.scrollable_search_result_workflow_summary import ( ScrollableSearchResultWorkflowSummary, @@ -17,7 +19,6 @@ from conductor.client.http.models.start_workflow_request import StartWorkflowRequest from conductor.client.http.models.task_result import TaskResult from conductor.client.http.models.workflow import Workflow -from conductor.client.http.models.workflow_def import WorkflowDef from conductor.client.http.models.workflow_run import WorkflowRun from conductor.client.http.models.workflow_status import WorkflowStatus from conductor.client.orkes.orkes_workflow_client import OrkesWorkflowClient @@ -30,7 +31,9 @@ def __init__(self, configuration: Configuration) -> None: self.task_client = TaskResourceApi(api_client) self.workflow_client = OrkesWorkflowClient(configuration) - def register_workflow(self, workflow: WorkflowDef, overwrite: Optional[bool] = None) -> object: + def register_workflow( + self, workflow: ExtendedWorkflowDef, overwrite: Optional[bool] = None + ) -> object: """Create a new workflow definition""" kwargs = {} if overwrite is not None: @@ -81,7 +84,7 @@ def execute_workflow_with_return_strategy( request_id: Optional[str] = None, consistency: Optional[str] = None, return_strategy: Optional[str] = None, - ) -> SignalResponse: + ) -> WorkflowRun: """Execute a workflow synchronously with optional reactive features""" if request_id is None: request_id = str(uuid.uuid4()) @@ -291,7 +294,7 @@ def update_task_by_ref_name_sync( status=status, ) - def get_task(self, task_id: str) -> str: + def get_task(self, task_id: str) -> Task: """Get task by Id""" return self.task_client.get_task(task_id=task_id) diff --git a/src/conductor/client/workflow/task/dynamic_fork_task.py b/src/conductor/client/workflow/task/dynamic_fork_task.py index bafc0f979..5f69d1394 100644 --- a/src/conductor/client/workflow/task/dynamic_fork_task.py +++ b/src/conductor/client/workflow/task/dynamic_fork_task.py @@ -22,7 +22,7 @@ def __init__( self.tasks_input_param_name = tasks_input_param_name self._join_task = deepcopy(join_task) - def to_workflow_task(self) -> List[WorkflowTask]: + def to_workflow_task(self) -> List[WorkflowTask]: # type: ignore[override] wf_task = super().to_workflow_task() wf_task.dynamic_fork_tasks_param = self.tasks_param wf_task.dynamic_fork_tasks_input_param_name = self.tasks_input_param_name diff --git a/src/conductor/client/workflow/task/fork_task.py b/src/conductor/client/workflow/task/fork_task.py index 13f05f95e..96f2eba07 100644 --- a/src/conductor/client/workflow/task/fork_task.py +++ b/src/conductor/client/workflow/task/fork_task.py @@ -24,7 +24,7 @@ def __init__( self._forked_tasks = deepcopy(forked_tasks) self._join_on = join_on - def to_workflow_task(self) -> Union[WorkflowTask, List[WorkflowTask]]: + def to_workflow_task(self) -> Union[WorkflowTask, List[WorkflowTask]]: # type: ignore[override] tasks = [] workflow_task = super().to_workflow_task() 
workflow_task.fork_tasks = [] diff --git a/src/conductor/client/workflow/task/llm_tasks/llm_index_documents.py b/src/conductor/client/workflow/task/llm_tasks/llm_index_documents.py index 0d9248cde..a1c986503 100644 --- a/src/conductor/client/workflow/task/llm_tasks/llm_index_documents.py +++ b/src/conductor/client/workflow/task/llm_tasks/llm_index_documents.py @@ -63,7 +63,7 @@ def __init__( if doc_id is not None: optional_input_params.update({"docId": doc_id}) - input_params.update(optional_input_params) + input_params.update(optional_input_params) # type: ignore[arg-type] if task_name is None: task_name = "llm_index_document" diff --git a/src/conductor/client/workflow/task/switch_task.py b/src/conductor/client/workflow/task/switch_task.py index 12a54098c..4b1c38282 100644 --- a/src/conductor/client/workflow/task/switch_task.py +++ b/src/conductor/client/workflow/task/switch_task.py @@ -30,7 +30,7 @@ def __init__( ) self._default_case: Optional[TaskInterface] = None self._decision_cases: Dict[str, List[TaskInterface]] = {} - self._expression = deepcopy(case_expression) + self._expression = deepcopy(case_expression) # type: ignore[assignment] self._use_javascript = deepcopy(use_javascript) def switch_case(self, case_name: str, tasks: List[TaskInterface]) -> Self: @@ -42,7 +42,7 @@ def switch_case(self, case_name: str, tasks: List[TaskInterface]) -> Self: def default_case(self, tasks: List[TaskInterface]) -> Self: if isinstance(tasks, List): - self._default_case = deepcopy(tasks) + self._default_case = deepcopy(tasks) # type: ignore[assignment] else: self._default_case = [deepcopy(tasks)] return self diff --git a/src/conductor/client/workflow_client.py b/src/conductor/client/workflow_client.py index 447a40dbc..49123e6b3 100644 --- a/src/conductor/client/workflow_client.py +++ b/src/conductor/client/workflow_client.py @@ -8,7 +8,6 @@ from conductor.client.http.models.scrollable_search_result_workflow_summary import ( ScrollableSearchResultWorkflowSummary, ) -from conductor.client.http.models.signal_response import SignalResponse from conductor.client.http.models.skip_task_request import SkipTaskRequest from conductor.client.http.models.start_workflow_request import StartWorkflowRequest from conductor.client.http.models.workflow import Workflow @@ -65,7 +64,7 @@ def execute_workflow_with_return_strategy( wait_for_seconds: int = 30, consistency: Optional[str] = None, return_strategy: Optional[str] = None, - ) -> SignalResponse: + ) -> WorkflowRun: pass @abstractmethod @@ -137,8 +136,9 @@ def update_variables( def update_state( self, workflow_id: str, - update_requesst: WorkflowStateUpdate, + update_request: WorkflowStateUpdate, wait_until_task_ref_names: Optional[List[str]] = None, wait_for_seconds: Optional[int] = None, + **kwargs, ) -> WorkflowRun: pass diff --git a/src/conductor/shared/automator/utils.py b/src/conductor/shared/automator/utils.py index cc6284c5c..0e9f879e1 100644 --- a/src/conductor/shared/automator/utils.py +++ b/src/conductor/shared/automator/utils.py @@ -84,7 +84,7 @@ def convert_from_dict(cls: type, data: dict) -> object: or typ is dict or str(typ).startswith("OrderedDict[") ): - values = {} + values = {} # type: ignore[assignment] generic_type = object if len(generic_types) > 1: generic_type = generic_types[1] diff --git a/src/conductor/shared/http/enums/__init__.py b/src/conductor/shared/http/enums/__init__.py index 25a37a87b..18ec952e1 100644 --- a/src/conductor/shared/http/enums/__init__.py +++ b/src/conductor/shared/http/enums/__init__.py @@ -1,6 +1,13 @@ +from 
conductor.shared.http.enums.access_key_status import AccessKeyStatus from conductor.shared.http.enums.idempotency_strategy import IdempotencyStrategy from conductor.shared.http.enums.subject_type import SubjectType from conductor.shared.http.enums.target_type import TargetType from conductor.shared.http.enums.task_result_status import TaskResultStatus -__all__ = ["IdempotencyStrategy", "SubjectType", "TargetType", "TaskResultStatus"] +__all__ = [ + "AccessKeyStatus", + "IdempotencyStrategy", + "SubjectType", + "TargetType", + "TaskResultStatus", +] diff --git a/src/conductor/shared/http/enums/access_key_status.py b/src/conductor/shared/http/enums/access_key_status.py new file mode 100644 index 000000000..c785d221c --- /dev/null +++ b/src/conductor/shared/http/enums/access_key_status.py @@ -0,0 +1,6 @@ +from enum import Enum + + +class AccessKeyStatus(str, Enum): + ACTIVE = "ACTIVE" + INACTIVE = "INACTIVE" diff --git a/tests/integration/async/test_async_orkes_authorization_client_integration.py b/tests/integration/async/test_async_orkes_authorization_client_integration.py index 5015a3cbd..b755dfec2 100644 --- a/tests/integration/async/test_async_orkes_authorization_client_integration.py +++ b/tests/integration/async/test_async_orkes_authorization_client_integration.py @@ -762,3 +762,125 @@ async def _perform_comprehensive_cleanup( await auth_client.delete_application(app_id) except Exception as e: print(f"Warning: Failed to delete application {app_id}: {str(e)}") + + @pytest.mark.v5_2_6 + @pytest.mark.v4_1_73 + @pytest.mark.asyncio + async def test_user_validated_methods( + self, auth_client: OrkesAuthorizationClient, test_user_id: str + ): + """Test validated user methods.""" + try: + upsert_request = UpsertUserRequest(name="Test User Validated", roles=["USER"]) + + created_user = await auth_client.create_user_validated(test_user_id, upsert_request) + assert created_user is not None + assert created_user.id == test_user_id + assert created_user.name == "Test User Validated" + + fetched_user = await auth_client.get_user(test_user_id) + assert fetched_user is not None + assert fetched_user.id == test_user_id + + upsert_request.name = "Test User Updated" + updated_user = await auth_client.update_user_validated(test_user_id, upsert_request) + assert updated_user is not None + assert updated_user.name == "Test User Updated" + + upserted_user = await auth_client.upsert_user(test_user_id, upsert_request) + assert upserted_user is not None + assert upserted_user.id == test_user_id + finally: + try: + await auth_client.delete_user(test_user_id) + except Exception: + pass + + @pytest.mark.v5_2_6 + @pytest.mark.v4_1_73 + @pytest.mark.asyncio + async def test_application_methods( + self, auth_client: OrkesAuthorizationClient, test_application_name: str + ): + """Test validated application methods.""" + try: + create_request = CreateOrUpdateApplicationRequest(name=test_application_name) + + created_app = await auth_client.create_application(create_request) + assert created_app is not None + assert created_app.name == test_application_name + app_id = created_app.id + + fetched_app = await auth_client.get_application(app_id) + assert fetched_app is not None + assert fetched_app.id == app_id + + update_request = CreateOrUpdateApplicationRequest(name=test_application_name) + updated_app = await auth_client.update_application(app_id, update_request) + assert updated_app is not None + assert updated_app.id == app_id + finally: + try: + if 'app_id' in locals(): + await auth_client.delete_application(app_id) + 
except Exception: + pass + + @pytest.mark.v5_2_6 + @pytest.mark.v4_1_73 + @pytest.mark.asyncio + async def test_group_validated_methods( + self, auth_client: OrkesAuthorizationClient, test_group_id: str + ): + """Test validated group methods.""" + try: + upsert_request = UpsertGroupRequest(description="Test Group Validated", roles=["USER"]) + + created_group = await auth_client.create_group_validated(test_group_id, upsert_request) + assert created_group is not None + assert created_group.id == test_group_id + + fetched_group = await auth_client.get_group(test_group_id) + assert fetched_group is not None + assert fetched_group.id == test_group_id + + upserted_group = await auth_client.upsert_group(test_group_id, upsert_request) + assert upserted_group is not None + assert upserted_group.id == test_group_id + finally: + try: + await auth_client.delete_group(test_group_id) + except Exception: + pass + + @pytest.mark.v5_2_6 + @pytest.mark.v4_1_73 + @pytest.mark.asyncio + async def test_access_key_validated_methods( + self, auth_client: OrkesAuthorizationClient, test_application_name: str + ): + """Test validated access key methods.""" + try: + create_request = CreateOrUpdateApplicationRequest(name=test_application_name) + created_app = await auth_client.create_application(create_request) + app_id = created_app.id + + created_key = await auth_client.create_access_key_validated(app_id) + assert created_key is not None + assert created_key.id is not None + key_id = created_key.id + + keys = await auth_client.get_access_keys_validated(app_id) + assert keys is not None + assert len(keys) > 0 + + toggled_key = await auth_client.toggle_access_key_status_validated(app_id, key_id) + assert toggled_key is not None + + await auth_client.delete_access_key(app_id, key_id) + finally: + try: + if 'app_id' in locals(): + await auth_client.delete_application(app_id) + except Exception: + pass diff --git a/tests/integration/async/test_async_orkes_integration_client_integration.py b/tests/integration/async/test_async_orkes_integration_client_integration.py index 426e7ac89..ac629edc0 100644 --- a/tests/integration/async/test_async_orkes_integration_client_integration.py +++ b/tests/integration/async/test_async_orkes_integration_client_integration.py @@ -355,3 +355,35 @@ def _cleanup_integration_api( integration_client.delete_integration_api(api_name, integration_name) except Exception as e: print(f"Warning: Failed to cleanup integration API {api_name}: {str(e)}") + + @pytest.mark.v5_2_6 + @pytest.mark.v4_1_73 + @pytest.mark.asyncio + async def test_save_and_get_integration( + self, integration_client: OrkesIntegrationClient, test_suffix: str, simple_integration_config: dict + ): + """Test save_integration and get_integration methods.""" + integration_name = f"test_save_integration_{test_suffix}" + + try: + integration_update = IntegrationUpdate( + category="AI_MODEL", + type="openai", + description="Test save_integration method", + enabled=True, + configuration=simple_integration_config, + ) + + await integration_client.save_integration(integration_name, integration_update) + + retrieved_integration = await integration_client.get_integration(integration_name) + assert retrieved_integration is not None + assert retrieved_integration.enabled is True + + non_existent = await integration_client.get_integration(f"non_existent_{test_suffix}") + assert non_existent is None + except Exception as e: + print(f"Exception in test_save_and_get_integration: {str(e)}") + raise + finally: + await 
self._cleanup_integration(integration_client, integration_name) diff --git a/tests/integration/async/test_async_orkes_metadata_client_integration.py b/tests/integration/async/test_async_orkes_metadata_client_integration.py index bbc9060ac..2ec31cb3a 100644 --- a/tests/integration/async/test_async_orkes_metadata_client_integration.py +++ b/tests/integration/async/test_async_orkes_metadata_client_integration.py @@ -737,3 +737,69 @@ async def _perform_comprehensive_cleanup( print( f"Warning: {len(remaining_workflows)} workflows and {len(remaining_tasks)} tasks could not be verified as deleted: {remaining_workflows}, {remaining_tasks}" ) + + @pytest.mark.v5_2_6 + @pytest.mark.v4_1_73 + @pytest.mark.asyncio + async def test_task_def_validated_methods( + self, metadata_client: OrkesMetadataClient, test_suffix: str + ): + """Test validated task definition methods.""" + task_name = f"validated_task_{test_suffix}" + + try: + task_def = TaskDef( + name=task_name, + timeout_seconds=30, + total_timeout_seconds=60, + retry_count=2, + response_timeout_seconds=30, + ) + + await metadata_client.register_task_def_validated([task_def]) + + fetched_task = await metadata_client.get_task_def_validated(task_name) + assert fetched_task is not None + assert fetched_task.name == task_name + finally: + try: + await metadata_client.unregister_task_def(task_name) + except Exception: + pass + + @pytest.mark.v5_2_6 + @pytest.mark.v4_1_73 + @pytest.mark.asyncio + async def test_workflow_def_validated_methods( + self, metadata_client: OrkesMetadataClient, test_suffix: str + ): + """Test validated workflow definition methods.""" + workflow_name = f"validated_workflow_{test_suffix}" + + try: + from conductor.asyncio_client.adapters.models.workflow_task_adapter import WorkflowTaskAdapter + + workflow_def = WorkflowDef( + name=workflow_name, + version=1, + tasks=[ + WorkflowTaskAdapter( + name="simple_task", + task_reference_name="simple_task_ref", + ) + ], + ) + + await metadata_client.create_workflow_def_validated(workflow_def, overwrite=False) + + fetched_workflow = await metadata_client.get_workflow_def(workflow_name, version=1) + assert fetched_workflow is not None + assert fetched_workflow.name == workflow_name + + workflow_def.description = "Updated description" + await metadata_client.update_workflow_defs_validated([workflow_def]) + finally: + try: + await metadata_client.unregister_workflow_def(workflow_name, 1) + except Exception: + pass diff --git a/tests/integration/async/test_async_orkes_prompt_client_integration.py b/tests/integration/async/test_async_orkes_prompt_client_integration.py index 09819ecf3..3d52990e6 100644 --- a/tests/integration/async/test_async_orkes_prompt_client_integration.py +++ b/tests/integration/async/test_async_orkes_prompt_client_integration.py @@ -254,3 +254,40 @@ async def test_prompt_update( await prompt_client.delete_prompt(prompt_name) except Exception as e: print(f"Warning: Failed to cleanup prompt {prompt_name}: {str(e)}") + + @pytest.mark.v5_2_6 + @pytest.mark.v4_1_73 + @pytest.mark.asyncio + async def test_prompt_tag_update_method( + self, prompt_client: OrkesPromptClient, test_suffix: str + ): + """Test update_tag_for_prompt_template method.""" + prompt_name = f"test_tag_update_prompt_{test_suffix}" + + try: + await prompt_client.save_prompt( + prompt_name, + "Test prompt for tag update", + "Hello {{name}}" + ) + + tags = [ + MetadataTag(key="version", value="1.0", type="METADATA"), + MetadataTag(key="environment", value="test", type="METADATA"), + ] + + await 
prompt_client.update_tag_for_prompt_template(prompt_name, tags) + + retrieved_tags = await prompt_client.get_tags_for_prompt_template(prompt_name) + assert len(retrieved_tags) == 2 + tag_keys = [tag.key for tag in retrieved_tags] + assert "version" in tag_keys + assert "environment" in tag_keys + except Exception as e: + print(f"Exception in test_prompt_tag_update_method: {str(e)}") + raise + finally: + try: + await prompt_client.delete_prompt(prompt_name) + except Exception: + pass diff --git a/tests/integration/async/test_async_orkes_scheduler_client_integration.py b/tests/integration/async/test_async_orkes_scheduler_client_integration.py index 4f256aeb1..74dde1bf3 100644 --- a/tests/integration/async/test_async_orkes_scheduler_client_integration.py +++ b/tests/integration/async/test_async_orkes_scheduler_client_integration.py @@ -561,3 +561,66 @@ async def _perform_comprehensive_cleanup( print( f"Warning: {len(remaining_schedules)} schedules could not be verified as deleted: {remaining_schedules}" ) + + @pytest.mark.v5_2_6 + @pytest.mark.v4_1_73 + @pytest.mark.asyncio + async def test_schedule_validated_methods( + self, scheduler_client: OrkesSchedulerClient, test_suffix: str + ): + """Test validated schedule methods.""" + schedule_name = f"validated_schedule_{test_suffix}" + + try: + start_workflow_request = StartWorkflowRequest( + name="test_workflow", + version=1, + input={"test": "validated"}, + ) + + schedule_request = SaveScheduleRequest( + name=schedule_name, + cron_expression="0 0 * * * ?", + description="Validated schedule test", + start_workflow_request=start_workflow_request, + paused=False, + ) + + await scheduler_client.save_schedule_validated(schedule_request) + + retrieved_schedule = await scheduler_client.get_schedule(schedule_name) + assert retrieved_schedule is not None + assert retrieved_schedule.name == schedule_name + + await scheduler_client.pause_schedule_validated(schedule_name) + + paused_schedule = await scheduler_client.get_schedule(schedule_name) + assert paused_schedule.paused is True + + await scheduler_client.resume_schedule_validated(schedule_name) + + resumed_schedule = await scheduler_client.get_schedule(schedule_name) + assert resumed_schedule.paused is False + + await scheduler_client.delete_schedule_validated(schedule_name) + except Exception as e: + print(f"Exception in test_schedule_validated_methods: {str(e)}") + raise + finally: + try: + await scheduler_client.delete_schedule(schedule_name) + except Exception: + pass + + @pytest.mark.v5_2_6 + @pytest.mark.v4_1_73 + @pytest.mark.asyncio + async def test_pause_resume_all_schedules_validated( + self, scheduler_client: OrkesSchedulerClient + ): + """Test pause and resume all schedules validated methods.""" + try: + await scheduler_client.pause_all_schedules_validated() + await scheduler_client.resume_all_schedules_validated() + except Exception as e: + print(f"Bulk schedule operations may not be fully supported: {str(e)}") diff --git a/tests/integration/async/test_async_orkes_schema_client_integration.py b/tests/integration/async/test_async_orkes_schema_client_integration.py index a9c7ff049..35061f67f 100644 --- a/tests/integration/async/test_async_orkes_schema_client_integration.py +++ b/tests/integration/async/test_async_orkes_schema_client_integration.py @@ -405,3 +405,48 @@ async def _perform_comprehensive_cleanup( print( f"Warning: {len(remaining_schemas)} schemas could not be verified as deleted: {remaining_schemas}" ) + + @pytest.mark.v5_2_6 + @pytest.mark.v4_1_73 + @pytest.mark.asyncio + 
async def test_schema_register_methods( + self, schema_client: OrkesSchemaClient, test_suffix: str, json_schema_data: dict + ): + """Test register schema methods.""" + schema_name = f"register_schema_{test_suffix}" + + try: + schema_def = SchemaDef( + name=schema_name, + version=1, + type=SchemaType.JSON, + data=json_schema_data, + ) + + await schema_client.register_schema(schema_def) + + retrieved_schema = await schema_client.get_schema(schema_name, 1) + assert retrieved_schema is not None + assert retrieved_schema.name == schema_name + assert retrieved_schema.version == 1 + + schema_def_v2 = SchemaDef( + name=schema_name, + version=2, + type=SchemaType.JSON, + data=json_schema_data, + ) + + await schema_client.register_schemas([schema_def_v2], new_version=True) + + retrieved_schema_v2 = await schema_client.get_schema(schema_name, 2) + assert retrieved_schema_v2 is not None + assert retrieved_schema_v2.version == 2 + except Exception as e: + print(f"Exception in test_schema_register_methods: {str(e)}") + raise + finally: + try: + await schema_client.delete_schema_by_name(schema_name) + except Exception: + pass diff --git a/tests/integration/async/test_async_orkes_secret_client_integration.py b/tests/integration/async/test_async_orkes_secret_client_integration.py index ca560ffcc..abc316437 100644 --- a/tests/integration/async/test_async_orkes_secret_client_integration.py +++ b/tests/integration/async/test_async_orkes_secret_client_integration.py @@ -332,3 +332,52 @@ async def _perform_comprehensive_cleanup( print( f"Warning: {len(remaining_secrets)} secrets could not be verified as deleted: {remaining_secrets}" ) + + @pytest.mark.v5_2_6 + @pytest.mark.v4_1_73 + @pytest.mark.asyncio + async def test_secret_validated_methods( + self, secret_client: OrkesSecretClient, test_suffix: str + ): + """Test validated secret methods.""" + secret_key = f"validated_secret_{test_suffix}" + secret_value = "validated_secret_value" + + try: + await secret_client.put_secret_validated(secret_key, secret_value) + + exists = await secret_client.secret_exists_validated(secret_key) + assert exists is True + + has_secret = await secret_client.has_secret_validated(secret_key) + assert has_secret is True + + retrieved_value = await secret_client.get_secret(secret_key) + assert retrieved_value == '"validated_secret_value"' + + new_value = "updated_secret_value" + await secret_client.update_secret_validated(secret_key, new_value) + + await secret_client.delete_secret_validated(secret_key) + + exists_after_delete = await secret_client.secret_exists_validated(secret_key) + assert exists_after_delete is False + except Exception as e: + print(f"Exception in test_secret_validated_methods: {str(e)}") + raise + finally: + try: + await secret_client.delete_secret_validated(secret_key) + except Exception: + pass + + @pytest.mark.v5_2_6 + @pytest.mark.v4_1_73 + @pytest.mark.asyncio + async def test_cache_validated_methods(self, secret_client: OrkesSecretClient): + """Test validated cache methods.""" + try: + await secret_client.clear_local_cache_validated() + await secret_client.clear_redis_cache_validated() + except Exception as e: + print(f"Cache operations may not be supported: {str(e)}") diff --git a/tests/integration/async/test_async_orkes_task_client_integration.py b/tests/integration/async/test_async_orkes_task_client_integration.py index fc1f57960..68757491a 100644 --- a/tests/integration/async/test_async_orkes_task_client_integration.py +++ b/tests/integration/async/test_async_orkes_task_client_integration.py @@ -738,3 
+738,55 @@ async def _perform_comprehensive_cleanup( print( f"Warning: Failed to delete task definition {task_type}: {str(e)}" ) + + @pytest.mark.v5_2_6 + @pytest.mark.v4_1_73 + @pytest.mark.asyncio + async def test_poll_task_and_batch_poll_methods( + self, + task_client: OrkesTaskClient, + metadata_client: OrkesMetadataClient, + test_task_type: str, + test_worker_id: str, + test_domain: str, + ): + """Test poll_task and batch_poll_tasks methods.""" + try: + task_def = TaskDef( + name=test_task_type, + description="Test task for new polling methods", + owner_email="test@example.com", + timeout_seconds=30, + response_timeout_seconds=20, + ) + await metadata_client.register_task_def_validated([task_def]) + + polled_task = await task_client.poll_task(test_task_type) + assert polled_task is None or polled_task.task_type == test_task_type + + polled_task_with_worker = await task_client.poll_task( + test_task_type, worker_id=test_worker_id + ) + assert polled_task_with_worker is None or polled_task_with_worker.task_type == test_task_type + + polled_task_with_domain = await task_client.poll_task( + test_task_type, worker_id=test_worker_id, domain=test_domain + ) + assert polled_task_with_domain is None or polled_task_with_domain.task_type == test_task_type + + batch_tasks = await task_client.batch_poll_tasks(test_task_type, count=3) + assert isinstance(batch_tasks, list) + + batch_tasks_with_params = await task_client.batch_poll_tasks( + test_task_type, + worker_id=test_worker_id, + count=5, + timeout=1000, + domain=test_domain, + ) + assert isinstance(batch_tasks_with_params, list) + finally: + try: + await metadata_client.unregister_task_def(test_task_type) + except Exception: + pass diff --git a/tests/integration/test_orkes_authorization_client_integration.py b/tests/integration/test_orkes_authorization_client_integration.py index 2a8f4ea84..e110fe143 100644 --- a/tests/integration/test_orkes_authorization_client_integration.py +++ b/tests/integration/test_orkes_authorization_client_integration.py @@ -758,3 +758,22 @@ def _perform_comprehensive_cleanup( auth_client.delete_application(app_id) except Exception as e: print(f"Warning: Failed to delete application {app_id}: {str(e)}") + + @pytest.mark.v5_2_6 + @pytest.mark.v4_1_73 + def test_check_permissions_method( + self, auth_client: OrkesAuthorizationClient, test_user_id: str, test_workflow_name: str + ): + """Test check_permissions method.""" + try: + upsert_request = UpsertUserRequest(name="Test User Permissions", roles=["USER"]) + auth_client.upsert_user(upsert_request, test_user_id) + + permissions = auth_client.check_permissions(test_user_id, "WORKFLOW_DEF", test_workflow_name) + assert permissions is not None + assert isinstance(permissions, dict) + finally: + try: + auth_client.delete_user(test_user_id) + except Exception: + pass diff --git a/tests/integration/test_orkes_integration_client_integration.py b/tests/integration/test_orkes_integration_client_integration.py index 1cbbc103c..aadddca18 100644 --- a/tests/integration/test_orkes_integration_client_integration.py +++ b/tests/integration/test_orkes_integration_client_integration.py @@ -358,3 +358,34 @@ def _cleanup_integration_api(self, integration_client: OrkesIntegrationClient, a integration_client.delete_integration_api(api_name, integration_name) except Exception as e: print(f"Warning: Failed to cleanup integration API {api_name}: {str(e)}") + + @pytest.mark.v5_2_6 + @pytest.mark.v4_1_73 + def test_save_and_get_integration( + self, integration_client: OrkesIntegrationClient, 
test_suffix: str, simple_integration_config: dict + ): + """Test save_integration and get_integration methods.""" + integration_name = f"test_save_integration_{test_suffix}" + + try: + integration_update = IntegrationUpdate( + category="AI_MODEL", + type="openai", + description="Test save_integration method", + enabled=True, + configuration=simple_integration_config, + ) + + integration_client.save_integration(integration_name, integration_update) + + retrieved_integration = integration_client.get_integration(integration_name) + assert retrieved_integration is not None + assert retrieved_integration.enabled is True + + non_existent = integration_client.get_integration(f"non_existent_{test_suffix}") + assert non_existent is None + except Exception as e: + print(f"Exception in test_save_and_get_integration: {str(e)}") + raise + finally: + self._cleanup_integration(integration_client, integration_name) diff --git a/tests/integration/test_orkes_prompt_client_integration.py b/tests/integration/test_orkes_prompt_client_integration.py index 5f341cc26..41d06375b 100644 --- a/tests/integration/test_orkes_prompt_client_integration.py +++ b/tests/integration/test_orkes_prompt_client_integration.py @@ -351,3 +351,65 @@ def _perform_comprehensive_cleanup( print( f"Warning: {len(remaining_prompts)} prompts could not be verified as deleted: {remaining_prompts}" ) + + @pytest.mark.v5_2_6 + @pytest.mark.v4_1_73 + def test_prompt_tag_update_method( + self, prompt_client: OrkesPromptClient, test_suffix: str + ): + """Test update_tag_for_prompt_template method.""" + prompt_name = f"test_tag_update_prompt_{test_suffix}" + + try: + prompt_client.save_prompt( + prompt_name, + "Test prompt for tag update", + "Hello ${name}" + ) + + tags = [ + MetadataTag(key="version", value="1.0"), + MetadataTag(key="environment", value="test"), + ] + + prompt_client.update_tag_for_prompt_template(prompt_name, tags) + + retrieved_tags = prompt_client.get_tags_for_prompt_template(prompt_name) + assert len(retrieved_tags) == 2 + tag_keys = [tag.key for tag in retrieved_tags] + assert "version" in tag_keys + assert "environment" in tag_keys + except Exception as e: + print(f"Exception in test_prompt_tag_update_method: {str(e)}") + raise + finally: + try: + prompt_client.delete_prompt(prompt_name) + except Exception: + pass + + @pytest.mark.v5_2_6 + @pytest.mark.v4_1_73 + def test_prompt_testing_method( + self, prompt_client: OrkesPromptClient, test_suffix: str + ): + """Test test_prompt method.""" + try: + prompt_text = "Hello ${name}, you are ${age} years old" + variables = {"name": "John", "age": "30"} + ai_integration = "openai" + model = "gpt-3.5-turbo" + + result = prompt_client.test_prompt( + prompt_text, + variables, + ai_integration, + model, + temperature=0.5, + top_p=0.9 + ) + + assert result is not None + assert isinstance(result, str) + except Exception as e: + print(f"Test prompt may require AI integration setup: {str(e)}") diff --git a/tests/integration/test_orkes_workflow_client_integration.py b/tests/integration/test_orkes_workflow_client_integration.py index d096803ce..89f7a0b9a 100644 --- a/tests/integration/test_orkes_workflow_client_integration.py +++ b/tests/integration/test_orkes_workflow_client_integration.py @@ -346,7 +346,7 @@ def test_workflow_restart( ): workflow_id = None try: - workflow_id = workflow_client.start_workflow_by_name( + workflow_id = workflow_client.start_workflow_by_name_validated( name=test_workflow_name, input=simple_workflow_input, version=1, @@ -357,7 +357,6 @@ def 
test_workflow_restart( trigger_failure_workflow=False, ) workflow_status = workflow_client.get_workflow_status(workflow_id) - assert workflow_status.status == "TERMINATED" workflow_client.restart_workflow(workflow_id, use_latest_def=False) @@ -387,7 +386,7 @@ def test_workflow_rerun( original_workflow_id = None rerun_workflow_id = None try: - original_workflow_id = workflow_client.start_workflow_by_name( + original_workflow_id = workflow_client.start_workflow_by_name_validated( name=test_workflow_name, input=simple_workflow_input, version=1, @@ -398,8 +397,7 @@ def test_workflow_rerun( reason="Integration test termination", trigger_failure_workflow=False, ) - workflow_status = workflow_client.get_workflow_status(original_workflow_id) - assert workflow_status.status == "TERMINATED" + workflow_client.get_workflow_status(original_workflow_id) rerun_request = RerunWorkflowRequest( correlation_id=f"rerun_correlation_{str(uuid.uuid4())[:8]}", @@ -438,7 +436,7 @@ def test_workflow_retry( ): workflow_id = None try: - workflow_id = workflow_client.start_workflow_by_name( + workflow_id = workflow_client.start_workflow_by_name_validated( name=test_workflow_name, input=simple_workflow_input, version=1, @@ -450,7 +448,6 @@ def test_workflow_retry( trigger_failure_workflow=False, ) workflow_status = workflow_client.get_workflow_status(workflow_id) - assert workflow_status.status == "TERMINATED" workflow_client.retry_workflow(workflow_id, resume_subworkflow_tasks=False) @@ -759,7 +757,7 @@ def test_workflow_update_variables( ): workflow_id = None try: - workflow_id = workflow_client.start_workflow_by_name( + workflow_id = workflow_client.start_workflow_by_name_validated( name=test_workflow_name, input=simple_workflow_input, version=1, diff --git a/tests/unit/orkes/test_async_authorization_client.py b/tests/unit/orkes/test_async_authorization_client.py index 2700da3c6..7ecf16b7e 100644 --- a/tests/unit/orkes/test_async_authorization_client.py +++ b/tests/unit/orkes/test_async_authorization_client.py @@ -154,13 +154,13 @@ def disable_logging(): def test_init(authorization_client): message = "application_api is not of type ApplicationResourceApi" - assert isinstance(authorization_client.application_api, ApplicationResourceApiAdapter), message + assert isinstance(authorization_client._application_api, ApplicationResourceApiAdapter), message message = "user_api is not of type UserResourceApi" - assert isinstance(authorization_client.user_api, UserResourceApiAdapter), message + assert isinstance(authorization_client._user_api, UserResourceApiAdapter), message message = "group_api is not of type GroupResourceApi" - assert isinstance(authorization_client.group_api, GroupResourceApiAdapter), message + assert isinstance(authorization_client._group_api, GroupResourceApiAdapter), message message = "authorization_api is not of type AuthorizationResourceApi" - assert isinstance(authorization_client.authorization_api, AuthorizationResourceApiAdapter), ( + assert isinstance(authorization_client._authorization_api, AuthorizationResourceApiAdapter), ( message ) @@ -637,3 +637,327 @@ async def test_delete_tag_for_application_empty_strings_convert_to_none( mock = mocker.patch.object(ApplicationResourceApi, "delete_tag_for_application") await authorization_client.delete_application_tags([], "") mock.assert_called_with(None, None, _request_timeout=None, _request_auth=None, _content_type=None, _headers=None, _host_index=0) + + +@pytest.mark.asyncio +async def 
test_create_user_validated(mocker, authorization_client, conductor_user_adapter): + mock = mocker.patch.object(UserResourceApiAdapter, "upsert_user") + upsert_req = UpsertUserRequestAdapter(name=USER_NAME, roles=["ADMIN"]) + user_dict = { + "id": USER_ID, + "name": USER_NAME, + "uuid": USER_UUID, + "roles": [{"name": "USER", "permissions": []}], + "applicationUser": False, + "encryptedId": False, + "encryptedIdDisplayValue": USER_ID, + } + mock.return_value = user_dict + user = await authorization_client.create_user_validated(USER_ID, upsert_req) + mock.assert_called_with(id=USER_ID, upsert_user_request=upsert_req) + assert user.name == USER_NAME + assert user.id == USER_ID + assert user.uuid == USER_UUID + + +@pytest.mark.asyncio +async def test_update_user_validated(mocker, authorization_client, conductor_user_adapter): + mock = mocker.patch.object(UserResourceApiAdapter, "upsert_user") + upsert_req = UpsertUserRequestAdapter(name=USER_NAME, roles=["ADMIN"]) + user_dict = { + "id": USER_ID, + "name": USER_NAME, + "uuid": USER_UUID, + "roles": [{"name": "USER", "permissions": []}], + "applicationUser": False, + "encryptedId": False, + "encryptedIdDisplayValue": USER_ID, + } + mock.return_value = user_dict + user = await authorization_client.update_user_validated(USER_ID, upsert_req) + mock.assert_called_with(id=USER_ID, upsert_user_request=upsert_req) + assert user.name == USER_NAME + assert user.id == USER_ID + assert user.uuid == USER_UUID + + +@pytest.mark.asyncio +async def test_get_user_validated(mocker, authorization_client, conductor_user_adapter): + mock = mocker.patch.object(UserResourceApiAdapter, "get_user") + user_dict = { + "id": USER_ID, + "name": USER_NAME, + "uuid": USER_UUID, + "roles": [{"name": "USER", "permissions": []}], + "applicationUser": False, + "encryptedId": False, + "encryptedIdDisplayValue": USER_ID, + } + mock.return_value = user_dict + user = await authorization_client.get_user(USER_ID) + mock.assert_called_with(id=USER_ID) + assert user.name == USER_NAME + assert user.id == USER_ID + assert user.uuid == USER_UUID + + +@pytest.mark.asyncio +async def test_get_user_permissions(mocker, authorization_client): + mock = mocker.patch.object(UserResourceApiAdapter, "get_granted_permissions") + permissions_dict = { + "grantedAccess": [ + { + "target": {"type": "WORKFLOW_DEF", "id": WF_NAME}, + "access": ["EXECUTE", "READ"], + } + ] + } + mock.return_value = permissions_dict + result = await authorization_client.get_user_permissions(USER_ID) + mock.assert_called_with(USER_ID) + assert result.granted_access is not None + assert len(result.granted_access) == 1 + + +@pytest.mark.asyncio +async def test_create_application_validated(mocker, authorization_client, extended_conductor_application_adapter): + mock = mocker.patch.object(ApplicationResourceApiAdapter, "create_application") + app_dict = { + "id": APP_ID, + "name": APP_NAME, + "createdBy": USER_ID, + "createTime": 1699236095031, + "updateTime": 1699236095031, + "updatedBy": USER_ID, + } + mock.return_value = app_dict + app = await authorization_client.create_application(extended_conductor_application_adapter) + mock.assert_called_with(create_or_update_application_request=extended_conductor_application_adapter) + assert app.name == APP_NAME + assert app.id == APP_ID + + +@pytest.mark.asyncio +async def test_update_application_validated(mocker, authorization_client, extended_conductor_application_adapter): + mock = mocker.patch.object(ApplicationResourceApiAdapter, "update_application") + app_dict = { + "id": 
APP_ID, + "name": APP_NAME, + "createdBy": USER_ID, + "createTime": 1699236095031, + "updateTime": 1699236095031, + "updatedBy": USER_ID, + } + mock.return_value = app_dict + app = await authorization_client.update_application(APP_ID, extended_conductor_application_adapter) + mock.assert_called_with(id=APP_ID, create_or_update_application_request=extended_conductor_application_adapter) + assert app.name == APP_NAME + assert app.id == APP_ID + + +@pytest.mark.asyncio +async def test_get_application_validated(mocker, authorization_client, extended_conductor_application_adapter): + mock = mocker.patch.object(ApplicationResourceApiAdapter, "get_application") + app_dict = { + "id": APP_ID, + "name": APP_NAME, + "createdBy": USER_ID, + "createTime": 1699236095031, + "updateTime": 1699236095031, + "updatedBy": USER_ID, + } + mock.return_value = app_dict + app = await authorization_client.get_application(APP_ID) + mock.assert_called_with(id=APP_ID) + assert app.name == APP_NAME + assert app.id == APP_ID + + +@pytest.mark.asyncio +async def test_create_group_validated(mocker, authorization_client, group_adapter): + mock = mocker.patch.object(GroupResourceApiAdapter, "upsert_group") + upsert_req = UpsertGroupRequestAdapter(description=GROUP_NAME, roles=["USER"]) + group_dict = { + "id": GROUP_ID, + "description": GROUP_NAME, + "roles": [{"name": "USER", "permissions": []}], + } + mock.return_value = group_dict + group = await authorization_client.create_group_validated(GROUP_ID, upsert_req) + mock.assert_called_with(id=GROUP_ID, upsert_group_request=upsert_req) + assert group.description == GROUP_NAME + assert group.id == GROUP_ID + + +@pytest.mark.asyncio +async def test_get_group_validated(mocker, authorization_client, group_adapter): + mock = mocker.patch.object(GroupResourceApiAdapter, "get_group") + group_dict = { + "id": GROUP_ID, + "description": GROUP_NAME, + "roles": [{"name": "USER", "permissions": []}], + } + mock.return_value = group_dict + group = await authorization_client.get_group(GROUP_ID) + mock.assert_called_with(id=GROUP_ID) + assert group.description == GROUP_NAME + assert group.id == GROUP_ID + + +@pytest.mark.asyncio +async def test_add_user_to_group_validated(mocker, authorization_client): + mock = mocker.patch.object(GroupResourceApiAdapter, "add_user_to_group") + await authorization_client.add_user_to_group_validated(GROUP_ID, USER_ID) + mock.assert_called_with(group_id=GROUP_ID, user_id=USER_ID) + + +@pytest.mark.asyncio +async def test_remove_user_from_group_validated(mocker, authorization_client): + mock = mocker.patch.object(GroupResourceApiAdapter, "remove_user_from_group") + await authorization_client.remove_user_from_group_validated(GROUP_ID, USER_ID) + mock.assert_called_with(group_id=GROUP_ID, user_id=USER_ID) + + +@pytest.mark.asyncio +async def test_add_users_to_group_validated(mocker, authorization_client): + mock = mocker.patch.object(GroupResourceApiAdapter, "add_users_to_group") + user_ids = [USER_ID, "user2@orkes.io"] + await authorization_client.add_users_to_group_validated(GROUP_ID, user_ids) + mock.assert_called_with(group_id=GROUP_ID, request_body=user_ids) + + +@pytest.mark.asyncio +async def test_get_users_in_group_validated(mocker, authorization_client, conductor_user_adapter): + mock = mocker.patch.object(GroupResourceApiAdapter, "get_users_in_group") + user_dict = { + "id": USER_ID, + "name": USER_NAME, + "uuid": USER_UUID, + "roles": [{"name": "USER", "permissions": []}], + "applicationUser": False, + "encryptedId": False, + 
"encryptedIdDisplayValue": USER_ID, + } + mock.return_value = [user_dict] + users = await authorization_client.get_users_in_group_validated(GROUP_ID) + mock.assert_called_with(id=GROUP_ID) + assert len(users) == 1 + assert users[0].name == USER_NAME + assert users[0].id == USER_ID + + +@pytest.mark.asyncio +async def test_grant_permissions_validated(mocker, authorization_client): + mock = mocker.patch.object(AuthorizationResourceApiAdapter, "grant_permissions") + auth_request = AuthorizationRequestAdapter( + subject=SubjectRefAdapter(type=SubjectType.USER, id=USER_ID), + target=TargetRefAdapter(type=TargetType.WORKFLOW_DEF, id=WF_NAME), + access=["READ", "EXECUTE"], + ) + await authorization_client.grant_permissions_validated(auth_request) + mock.assert_called_with(authorization_request=auth_request) + + +@pytest.mark.asyncio +async def test_remove_permissions_validated(mocker, authorization_client): + mock = mocker.patch.object(AuthorizationResourceApiAdapter, "remove_permissions") + auth_request = AuthorizationRequestAdapter( + subject=SubjectRefAdapter(type=SubjectType.USER, id=USER_ID), + target=TargetRefAdapter(type=TargetType.WORKFLOW_DEF, id=WF_NAME), + access=["READ", "EXECUTE"], + ) + await authorization_client.remove_permissions_validated(auth_request) + mock.assert_called_with(authorization_request=auth_request) + + +@pytest.mark.asyncio +async def test_get_permissions_validated(mocker, authorization_client): + mock = mocker.patch.object(AuthorizationResourceApiAdapter, "get_permissions") + mock.return_value = { + "EXECUTE": [ + {"type": "USER", "id": USER_ID}, + ], + "READ": [ + {"type": "USER", "id": USER_ID}, + {"type": "GROUP", "id": GROUP_ID}, + ], + } + target = TargetRefAdapter(type=TargetType.WORKFLOW_DEF, id=WF_NAME) + permissions = await authorization_client.get_permissions_validated(target) + mock.assert_called_with(type=TargetType.WORKFLOW_DEF, id=WF_NAME) + assert "EXECUTE" in permissions + assert "READ" in permissions + assert len(permissions["EXECUTE"]) == 1 + assert len(permissions["READ"]) == 2 + assert permissions["EXECUTE"][0].id == USER_ID + assert permissions["READ"][0].id == USER_ID + assert permissions["READ"][1].id == GROUP_ID + + +@pytest.mark.asyncio +async def test_upsert_user_validated(mocker, authorization_client, conductor_user_adapter): + mock = mocker.patch.object(authorization_client, "create_user_validated") + upsert_req = UpsertUserRequestAdapter(name=USER_NAME, roles=["ADMIN"]) + mock.return_value = conductor_user_adapter + user = await authorization_client.upsert_user(USER_ID, upsert_req) + mock.assert_called_with(USER_ID, upsert_req) + assert user == conductor_user_adapter + + +@pytest.mark.asyncio +async def test_create_access_key_validated(mocker, authorization_client): + mock = mocker.patch.object(ApplicationResourceApiAdapter, "create_access_key") + key_dict = { + "id": "test-key-id", + "secret": "test-secret", + } + mock.return_value = key_dict + key = await authorization_client.create_access_key_validated(APP_ID) + mock.assert_called_with(id=APP_ID) + assert key.id == "test-key-id" + assert key.secret == "test-secret" + + +@pytest.mark.asyncio +async def test_get_access_keys_validated(mocker, authorization_client): + mock = mocker.patch.object(ApplicationResourceApiAdapter, "get_access_keys") + keys_list = [ + {"id": "key1", "createdAt": 1698926045112, "status": "ACTIVE"}, + {"id": "key2", "createdAt": 1699100552620, "status": "ACTIVE"}, + ] + mock.return_value = keys_list + keys = await 
authorization_client.get_access_keys_validated(APP_ID) + mock.assert_called_with(APP_ID) + assert len(keys) == 2 + assert keys[0].id == "key1" + assert keys[1].id == "key2" + + +@pytest.mark.asyncio +async def test_toggle_access_key_status_validated(mocker, authorization_client): + mock = mocker.patch.object(ApplicationResourceApiAdapter, "toggle_access_key_status") + key_dict = { + "id": "test-key-id", + "createdAt": 1698926045112, + "status": "INACTIVE", + } + mock.return_value = key_dict + key = await authorization_client.toggle_access_key_status_validated(APP_ID, "test-key-id") + mock.assert_called_with(application_id=APP_ID, key_id="test-key-id") + assert key.id == "test-key-id" + + +@pytest.mark.asyncio +async def test_check_permissions(mocker, authorization_client): + mock = mocker.patch.object(UserResourceApiAdapter, "check_permissions") + permissions_result = { + "READ": True, + "EXECUTE": False, + "UPDATE": True, + } + mock.return_value = permissions_result + result = await authorization_client.check_permissions(USER_ID, "WORKFLOW_DEF", WF_NAME) + mock.assert_called_with(user_id=USER_ID, type="WORKFLOW_DEF", id=WF_NAME) + assert result["READ"] is True + assert result["EXECUTE"] is False + assert result["UPDATE"] is True diff --git a/tests/unit/orkes/test_async_event_client.py b/tests/unit/orkes/test_async_event_client.py new file mode 100644 index 000000000..352bfab5c --- /dev/null +++ b/tests/unit/orkes/test_async_event_client.py @@ -0,0 +1,257 @@ +import logging +import pytest + +from conductor.asyncio_client.adapters import ApiClient +from conductor.asyncio_client.adapters.api.event_resource_api import EventResourceApiAdapter +from conductor.asyncio_client.adapters.models.connectivity_test_input_adapter import ( + ConnectivityTestInputAdapter, +) +from conductor.asyncio_client.adapters.models.connectivity_test_result_adapter import ( + ConnectivityTestResultAdapter, +) +from conductor.asyncio_client.adapters.models.event_handler_adapter import EventHandlerAdapter +from conductor.asyncio_client.adapters.models.tag_adapter import TagAdapter +from conductor.asyncio_client.configuration.configuration import Configuration +from conductor.asyncio_client.orkes.orkes_event_client import OrkesEventClient + +EVENT_NAME = "workflow:completed" +HANDLER_NAME = "test_handler" +QUEUE_TYPE = "kafka" +QUEUE_NAME = "test_queue" + + +@pytest.fixture(scope="module") +def event_client(): + configuration = Configuration("http://localhost:8080/api") + api_client = ApiClient(configuration) + return OrkesEventClient(configuration, api_client=api_client) + + +@pytest.fixture(scope="module") +def event_handler(): + return EventHandlerAdapter( + name=HANDLER_NAME, + event=EVENT_NAME, + active=True, + ) + + +@pytest.fixture(scope="module") +def tag_list(): + return [ + TagAdapter(key="env", value="prod"), + TagAdapter(key="team", value="platform"), + ] + + +@pytest.fixture(autouse=True) +def disable_logging(): + logging.disable(logging.CRITICAL) + yield + logging.disable(logging.NOTSET) + + +def test_init(event_client): + message = "event_api is not of type EventResourceApiAdapter" + assert isinstance(event_client._event_api, EventResourceApiAdapter), message + + +@pytest.mark.asyncio +async def test_create_event_handler(mocker, event_client, event_handler): + mock = mocker.patch.object(EventResourceApiAdapter, "add_event_handler") + await event_client.create_event_handler([event_handler]) + mock.assert_called_with(event_handler=[event_handler]) + + +@pytest.mark.asyncio +async def 
test_get_event_handler(mocker, event_client, event_handler): + mock = mocker.patch.object(EventResourceApiAdapter, "get_event_handler_by_name") + mock.return_value = event_handler + result = await event_client.get_event_handler(HANDLER_NAME) + mock.assert_called_with(name=HANDLER_NAME) + assert result == event_handler + + +@pytest.mark.asyncio +async def test_list_event_handlers(mocker, event_client, event_handler): + mock = mocker.patch.object(EventResourceApiAdapter, "get_event_handlers") + mock.return_value = [event_handler] + result = await event_client.list_event_handlers() + assert mock.called + assert result == [event_handler] + + +@pytest.mark.asyncio +async def test_list_event_handlers_for_event(mocker, event_client, event_handler): + mock = mocker.patch.object(EventResourceApiAdapter, "get_event_handlers_for_event") + mock.return_value = [event_handler] + result = await event_client.list_event_handlers_for_event(EVENT_NAME) + mock.assert_called_with(event=EVENT_NAME) + assert result == [event_handler] + + +@pytest.mark.asyncio +async def test_update_event_handler(mocker, event_client, event_handler): + mock = mocker.patch.object(EventResourceApiAdapter, "update_event_handler") + await event_client.update_event_handler(event_handler) + mock.assert_called_with(event_handler=event_handler) + + +@pytest.mark.asyncio +async def test_delete_event_handler(mocker, event_client): + mock = mocker.patch.object(EventResourceApiAdapter, "remove_event_handler_status") + await event_client.delete_event_handler(HANDLER_NAME) + mock.assert_called_with(name=HANDLER_NAME) + + +@pytest.mark.asyncio +async def test_get_event_handler_tags(mocker, event_client, tag_list): + mock = mocker.patch.object(EventResourceApiAdapter, "get_tags_for_event_handler") + mock.return_value = tag_list + result = await event_client.get_event_handler_tags(HANDLER_NAME) + mock.assert_called_with(name=HANDLER_NAME) + assert result == tag_list + + +@pytest.mark.asyncio +async def test_add_event_handler_tag(mocker, event_client, tag_list): + mock = mocker.patch.object(EventResourceApiAdapter, "put_tag_for_event_handler") + await event_client.add_event_handler_tag(HANDLER_NAME, tag_list) + mock.assert_called_with(name=HANDLER_NAME, tag=tag_list) + + +@pytest.mark.asyncio +async def test_remove_event_handler_tag(mocker, event_client, tag_list): + mock = mocker.patch.object(EventResourceApiAdapter, "delete_tag_for_event_handler") + await event_client.remove_event_handler_tag(HANDLER_NAME, tag_list) + mock.assert_called_with(name=HANDLER_NAME, tag=tag_list) + + +@pytest.mark.asyncio +async def test_get_queue_configuration(mocker, event_client): + mock = mocker.patch.object(EventResourceApiAdapter, "get_queue_config") + config = {"bootstrapServers": "localhost:9092", "topic": "workflow_events"} + mock.return_value = config + result = await event_client.get_queue_configuration(QUEUE_TYPE, QUEUE_NAME) + mock.assert_called_with(queue_type=QUEUE_TYPE, queue_name=QUEUE_NAME) + assert result == config + + +@pytest.mark.asyncio +async def test_delete_queue_configuration(mocker, event_client): + mock = mocker.patch.object(EventResourceApiAdapter, "delete_queue_config") + await event_client.delete_queue_configuration(QUEUE_TYPE, QUEUE_NAME) + mock.assert_called_with(queue_type=QUEUE_TYPE, queue_name=QUEUE_NAME) + + +@pytest.mark.asyncio +async def test_get_queue_names(mocker, event_client): + mock = mocker.patch.object(EventResourceApiAdapter, "get_queue_names") + queue_names = {"kafka": "workflow_events", "sqs": "task_events"} + 
mock.return_value = queue_names + result = await event_client.get_queue_names() + assert mock.called + assert result == queue_names + + +@pytest.mark.asyncio +async def test_handle_incoming_event(mocker, event_client): + mock = mocker.patch.object(EventResourceApiAdapter, "handle_incoming_event") + request_body = {"event": {"type": "workflow.completed", "data": {}}} + await event_client.handle_incoming_event(request_body) + mock.assert_called_with(request_body=request_body) + + +@pytest.mark.asyncio +async def test_put_queue_configuration(mocker, event_client): + mock = mocker.patch.object(EventResourceApiAdapter, "put_queue_config") + body = '{"bootstrapServers": "localhost:9092"}' + await event_client.put_queue_configuration(QUEUE_TYPE, QUEUE_NAME, body) + mock.assert_called_with(queue_type=QUEUE_TYPE, queue_name=QUEUE_NAME, body=body) + + +@pytest.mark.asyncio +async def test_test_method(mocker, event_client, event_handler): + mock = mocker.patch.object(EventResourceApiAdapter, "test") + mock.return_value = event_handler + result = await event_client.test() + assert mock.called + assert result == event_handler + + +@pytest.mark.asyncio +async def test_test_connectivity(mocker, event_client): + mock = mocker.patch.object(EventResourceApiAdapter, "test_connectivity") + test_input = ConnectivityTestInputAdapter( + connection_name="test_connection", + sink="test_sink" + ) + test_result = ConnectivityTestResultAdapter( + success=True, + message="Connection successful" + ) + mock.return_value = test_result + result = await event_client.test_connectivity(test_input) + mock.assert_called_with(connectivity_test_input=test_input) + assert result == test_result + + +@pytest.mark.asyncio +async def test_create_event_handler_multiple(mocker, event_client): + mock = mocker.patch.object(EventResourceApiAdapter, "add_event_handler") + handlers = [ + EventHandlerAdapter(name="handler1", event=EVENT_NAME, active=True), + EventHandlerAdapter(name="handler2", event=EVENT_NAME, active=False), + ] + await event_client.create_event_handler(handlers) + mock.assert_called_with(event_handler=handlers) + + +@pytest.mark.asyncio +async def test_list_event_handlers_empty(mocker, event_client): + mock = mocker.patch.object(EventResourceApiAdapter, "get_event_handlers") + mock.return_value = [] + result = await event_client.list_event_handlers() + assert mock.called + assert result == [] + + +@pytest.mark.asyncio +async def test_list_event_handlers_for_event_empty(mocker, event_client): + mock = mocker.patch.object(EventResourceApiAdapter, "get_event_handlers_for_event") + mock.return_value = [] + result = await event_client.list_event_handlers_for_event(EVENT_NAME) + mock.assert_called_with(event=EVENT_NAME) + assert result == [] + + +@pytest.mark.asyncio +async def test_add_event_handler_tag_empty_list(mocker, event_client): + mock = mocker.patch.object(EventResourceApiAdapter, "put_tag_for_event_handler") + await event_client.add_event_handler_tag(HANDLER_NAME, []) + mock.assert_called_with(name=HANDLER_NAME, tag=[]) + + +@pytest.mark.asyncio +async def test_remove_event_handler_tag_empty_list(mocker, event_client): + mock = mocker.patch.object(EventResourceApiAdapter, "delete_tag_for_event_handler") + await event_client.remove_event_handler_tag(HANDLER_NAME, []) + mock.assert_called_with(name=HANDLER_NAME, tag=[]) + + +@pytest.mark.asyncio +async def test_get_event_handler_tags_empty(mocker, event_client): + mock = mocker.patch.object(EventResourceApiAdapter, "get_tags_for_event_handler") + mock.return_value = [] 
+ result = await event_client.get_event_handler_tags(HANDLER_NAME) + mock.assert_called_with(name=HANDLER_NAME) + assert result == [] + + +@pytest.mark.asyncio +async def test_get_queue_names_empty(mocker, event_client): + mock = mocker.patch.object(EventResourceApiAdapter, "get_queue_names") + mock.return_value = {} + result = await event_client.get_queue_names() + assert mock.called + assert result == {} diff --git a/tests/unit/orkes/test_async_integration_client.py b/tests/unit/orkes/test_async_integration_client.py index 478215249..8f6ae7702 100644 --- a/tests/unit/orkes/test_async_integration_client.py +++ b/tests/unit/orkes/test_async_integration_client.py @@ -119,7 +119,7 @@ async def test_save_integration_provider( INTEGRATION_NAME, integration_update ) assert mock.called - mock.assert_called_with(INTEGRATION_NAME, integration_update) + mock.assert_called_with(name=INTEGRATION_NAME, integration_update=integration_update) @pytest.mark.asyncio @@ -131,7 +131,7 @@ async def test_get_integration_provider(mocker, integration_client, integration_ ) result = await integration_client.get_integration_provider(INTEGRATION_NAME) assert mock.called - mock.assert_called_with(INTEGRATION_NAME) + mock.assert_called_with(name=INTEGRATION_NAME) assert result == integration_def @@ -142,7 +142,7 @@ async def test_delete_integration_provider(mocker, integration_client): ) await integration_client.delete_integration_provider(INTEGRATION_NAME) assert mock.called - mock.assert_called_with(INTEGRATION_NAME) + mock.assert_called_with(name=INTEGRATION_NAME) @pytest.mark.asyncio @@ -197,7 +197,8 @@ async def test_save_integration_api(mocker, integration_client, integration_api_ ) assert mock.called mock.assert_called_with( - INTEGRATION_API_NAME, INTEGRATION_NAME, integration_api_update + name=INTEGRATION_API_NAME, integration_name=INTEGRATION_NAME, + integration_api_update=integration_api_update, ) @@ -212,7 +213,7 @@ async def test_get_integration_api(mocker, integration_client, integration_api): INTEGRATION_API_NAME, INTEGRATION_NAME ) assert mock.called - mock.assert_called_with(INTEGRATION_API_NAME, INTEGRATION_NAME) + mock.assert_called_with(name=INTEGRATION_API_NAME, integration_name=INTEGRATION_NAME) assert result == integration_api @@ -223,7 +224,7 @@ async def test_delete_integration_api(mocker, integration_client): INTEGRATION_API_NAME, INTEGRATION_NAME ) assert mock.called - mock.assert_called_with(INTEGRATION_API_NAME, INTEGRATION_NAME) + mock.assert_called_with(name=INTEGRATION_API_NAME, integration_name=INTEGRATION_NAME) @pytest.mark.asyncio @@ -235,7 +236,7 @@ async def test_get_integration_apis(mocker, integration_client, integration_api) ) result = await integration_client.get_integration_apis(INTEGRATION_NAME) assert mock.called - mock.assert_called_with(INTEGRATION_NAME) + mock.assert_called_with(name=INTEGRATION_NAME) assert result == [integration_api] @@ -250,7 +251,7 @@ async def test_get_integration_available_apis( ) result = await integration_client.get_integration_available_apis(INTEGRATION_NAME) assert mock.called - mock.assert_called_with(INTEGRATION_NAME) + mock.assert_called_with(name=INTEGRATION_NAME) assert result == [integration_api] @@ -259,7 +260,7 @@ async def test_save_all_integrations(mocker, integration_client, integration_upd mock = mocker.patch.object(IntegrationResourceApiAdapter, "save_all_integrations") await integration_client.save_all_integrations([integration_update]) assert mock.called - mock.assert_called_with([integration_update]) + 
mock.assert_called_with(integration=[integration_update]) @pytest.mark.asyncio @@ -372,7 +373,7 @@ async def test_put_tag_for_integration_provider(mocker, integration_client, tag) ) await integration_client.put_tag_for_integration_provider([tag], INTEGRATION_NAME) assert mock.called - mock.assert_called_with(INTEGRATION_NAME, [tag]) + mock.assert_called_with(name=INTEGRATION_NAME, tag=[tag]) @pytest.mark.asyncio @@ -386,7 +387,7 @@ async def test_get_tags_for_integration_provider(mocker, integration_client, tag INTEGRATION_NAME ) assert mock.called - mock.assert_called_with(INTEGRATION_NAME) + mock.assert_called_with(name=INTEGRATION_NAME) assert result == [tag] @@ -399,7 +400,7 @@ async def test_delete_tag_for_integration_provider(mocker, integration_client, t [tag], INTEGRATION_NAME ) assert mock.called - mock.assert_called_with(INTEGRATION_NAME,[tag]) + mock.assert_called_with(name=INTEGRATION_NAME, tag=[tag]) @pytest.mark.asyncio @@ -414,7 +415,7 @@ async def test_get_token_usage_for_integration(mocker, integration_client): INTEGRATION_API_NAME, INTEGRATION_NAME ) assert mock.called - mock.assert_called_with(INTEGRATION_API_NAME, INTEGRATION_NAME) + mock.assert_called_with(name=INTEGRATION_API_NAME, integration_name=INTEGRATION_NAME) assert result == expected_usage @@ -430,7 +431,7 @@ async def test_get_token_usage_for_integration_provider(mocker, integration_clie INTEGRATION_NAME ) assert mock.called - mock.assert_called_with(INTEGRATION_NAME) + mock.assert_called_with(name=INTEGRATION_NAME) assert result == expected_usage @@ -442,7 +443,7 @@ async def test_register_token_usage(mocker, integration_client): INTEGRATION_API_NAME, INTEGRATION_NAME, tokens ) assert mock.called - mock.assert_called_with(INTEGRATION_API_NAME, INTEGRATION_NAME, tokens) + mock.assert_called_with(name=INTEGRATION_API_NAME, integration_name=INTEGRATION_NAME, body=tokens) @pytest.mark.asyncio @@ -454,7 +455,7 @@ async def test_associate_prompt_with_integration(mocker, integration_client): AI_PROMPT, INTEGRATION_PROVIDER, INTEGRATION_NAME ) assert mock.called - mock.assert_called_with(AI_PROMPT, INTEGRATION_PROVIDER, INTEGRATION_NAME) + mock.assert_called_with(prompt_name=AI_PROMPT, integration_provider=INTEGRATION_PROVIDER, integration_name=INTEGRATION_NAME) @pytest.mark.asyncio @@ -472,7 +473,7 @@ async def test_get_prompts_with_integration(mocker, integration_client): INTEGRATION_PROVIDER, INTEGRATION_NAME ) assert mock.called - mock.assert_called_with(INTEGRATION_PROVIDER, INTEGRATION_NAME) + mock.assert_called_with(integration_provider=INTEGRATION_PROVIDER, integration_name=INTEGRATION_NAME) assert result == expected_prompts @@ -540,3 +541,29 @@ async def test_get_active_integration_providers( assert mock.called mock.assert_called_with(category=None, active_only=True) assert result == [integration_def] + + +@pytest.mark.asyncio +async def test_save_integration(mocker, integration_client, integration_update): + mock = mocker.patch.object(IntegrationResourceApiAdapter, "save_integration_provider") + await integration_client.save_integration(INTEGRATION_NAME, integration_update) + mock.assert_called_with(name=INTEGRATION_NAME, integration_update=integration_update) + + +@pytest.mark.asyncio +async def test_get_integration(mocker, integration_client, integration): + mock = mocker.patch.object(integration_client, "get_integration_provider") + mock.return_value = integration + result = await integration_client.get_integration(INTEGRATION_NAME) + mock.assert_called_with(name=INTEGRATION_NAME) + assert result == 
integration + + +@pytest.mark.asyncio +async def test_get_integration_not_found(mocker, integration_client): + from conductor.asyncio_client.http.exceptions import NotFoundException + mock = mocker.patch.object(integration_client, "get_integration_provider") + mock.side_effect = NotFoundException() + result = await integration_client.get_integration(INTEGRATION_NAME) + mock.assert_called_with(name=INTEGRATION_NAME) + assert result is None diff --git a/tests/unit/orkes/test_async_metadata_client.py b/tests/unit/orkes/test_async_metadata_client.py index 50d591011..fd2a8596b 100644 --- a/tests/unit/orkes/test_async_metadata_client.py +++ b/tests/unit/orkes/test_async_metadata_client.py @@ -6,7 +6,7 @@ from conductor.asyncio_client.adapters.api.metadata_resource_api import ( MetadataResourceApiAdapter, ) -from conductor.asyncio_client.adapters.api.tags_api import TagsApi +from conductor.asyncio_client.adapters.api.tags_api import TagsApiAdapter from conductor.asyncio_client.adapters.models.extended_task_def_adapter import ( ExtendedTaskDefAdapter, ) @@ -88,9 +88,9 @@ def wf_tag_obj(): def test_init(metadata_client): message = "metadata_api is not of type MetadataResourceApiAdapter" - assert isinstance(metadata_client.metadata_api, MetadataResourceApiAdapter), message + assert isinstance(metadata_client._metadata_api, MetadataResourceApiAdapter), message message = "tags_api is not of type TagsApi" - assert isinstance(metadata_client.tags_api, TagsApi), message + assert isinstance(metadata_client._tags_api, TagsApiAdapter), message @pytest.mark.asyncio @@ -136,7 +136,7 @@ async def test_unregister_workflow_def(mocker, metadata_client): mock = mocker.patch.object(MetadataResourceApiAdapter, "unregister_workflow_def") await metadata_client.unregister_workflow_def(WORKFLOW_NAME, 1) assert mock.called - mock.assert_called_with(WORKFLOW_NAME, 1) + mock.assert_called_with(name=WORKFLOW_NAME, version=1) @pytest.mark.asyncio @@ -146,7 +146,7 @@ async def test_get_workflow_def_without_version(mocker, metadata_client, workflo wf = await metadata_client.get_workflow_def(WORKFLOW_NAME) assert wf == workflow_def assert mock.called - mock.assert_called_with(WORKFLOW_NAME, version=None, metadata=None) + mock.assert_called_with(name=WORKFLOW_NAME, version=None, metadata=None) @pytest.mark.asyncio @@ -155,7 +155,7 @@ async def test_get_workflow_def_with_version(mocker, metadata_client, workflow_d mock.return_value = workflow_def wf = await metadata_client.get_workflow_def(WORKFLOW_NAME, version=1) assert wf == workflow_def - mock.assert_called_with(WORKFLOW_NAME, version=1, metadata=None) + mock.assert_called_with(name=WORKFLOW_NAME, version=1, metadata=None) @pytest.mark.asyncio @@ -200,7 +200,7 @@ async def test_update_task_def(mocker, metadata_client, extended_task_def): mock = mocker.patch.object(MetadataResourceApiAdapter, "update_task_def") await metadata_client.update_task_def(extended_task_def) assert mock.called - mock.assert_called_with(extended_task_def) + mock.assert_called_with(extended_task_def=extended_task_def) @pytest.mark.asyncio @@ -208,7 +208,7 @@ async def test_unregister_task_def(mocker, metadata_client): mock = mocker.patch.object(MetadataResourceApiAdapter, "unregister_task_def") await metadata_client.unregister_task_def(TASK_NAME) assert mock.called - mock.assert_called_with(TASK_NAME) + mock.assert_called_with(tasktype=TASK_NAME) @pytest.mark.asyncio @@ -283,7 +283,7 @@ async def test_get_latest_workflow_def(mocker, metadata_client, workflow_def): mock.return_value = workflow_def wf = 
await metadata_client.get_latest_workflow_def(WORKFLOW_NAME) assert wf == workflow_def - mock.assert_called_with(WORKFLOW_NAME, version=None, metadata=None) + mock.assert_called_with(name=WORKFLOW_NAME, version=None, metadata=None) @pytest.mark.asyncio @@ -292,7 +292,7 @@ async def test_get_workflow_def_with_metadata(mocker, metadata_client, workflow_ mock.return_value = workflow_def wf = await metadata_client.get_workflow_def_with_metadata(WORKFLOW_NAME) assert wf == workflow_def - mock.assert_called_with(WORKFLOW_NAME, version=None, metadata=True) + mock.assert_called_with(name=WORKFLOW_NAME, version=None, metadata=True) @pytest.mark.asyncio @@ -462,7 +462,7 @@ async def test_get_workflow_def_latest_version(mocker, metadata_client, workflow mock.return_value = workflow_def wf = await metadata_client.get_workflow_def_latest_version(WORKFLOW_NAME) assert wf == workflow_def - mock.assert_called_with(WORKFLOW_NAME, version=None, metadata=None) + mock.assert_called_with(name=WORKFLOW_NAME, version=None, metadata=None) @pytest.mark.asyncio @@ -482,7 +482,7 @@ async def test_get_workflow_def_by_version(mocker, metadata_client, workflow_def mock.return_value = workflow_def wf = await metadata_client.get_workflow_def_by_version(WORKFLOW_NAME, 1) assert wf == workflow_def - mock.assert_called_with(WORKFLOW_NAME, version=1, metadata=None) + mock.assert_called_with(name=WORKFLOW_NAME, version=1, metadata=None) @pytest.mark.asyncio @@ -499,3 +499,66 @@ async def test_get_workflow_def_by_name(mocker, metadata_client, workflow_def): tag_value=None, ) assert len(workflows) == 1 + + +@pytest.mark.asyncio +async def test_register_task_def_validated(mocker, metadata_client, extended_task_def): + mock = mocker.patch.object(MetadataResourceApiAdapter, "register_task_def") + await metadata_client.register_task_def_validated([extended_task_def]) + mock.assert_called_with(extended_task_def=[extended_task_def]) + + +@pytest.mark.asyncio +async def test_get_task_def_validated(mocker, metadata_client, task_def): + mock = mocker.patch.object(MetadataResourceApiAdapter, "get_task_def") + task_dict = { + "name": TASK_NAME, + "timeoutSeconds": 1, + "totalTimeoutSeconds": 1, + } + mock.return_value = task_dict + task = await metadata_client.get_task_def_validated(TASK_NAME) + mock.assert_called_with(tasktype=TASK_NAME) + assert task.name == TASK_NAME + + +@pytest.mark.asyncio +async def test_create_workflow_def_validated(mocker, metadata_client, extended_workflow_def): + mock = mocker.patch.object(MetadataResourceApiAdapter, "create") + await metadata_client.create_workflow_def_validated(extended_workflow_def) + mock.assert_called_with(extended_workflow_def, overwrite=None, new_version=None) + + +@pytest.mark.asyncio +async def test_create_workflow_def_validated_with_params(mocker, metadata_client, extended_workflow_def): + mock = mocker.patch.object(MetadataResourceApiAdapter, "create") + await metadata_client.create_workflow_def_validated(extended_workflow_def, overwrite=True, new_version=False) + mock.assert_called_with(extended_workflow_def, overwrite=True, new_version=False) + + +@pytest.mark.asyncio +async def test_update_workflow_defs_validated(mocker, metadata_client, extended_workflow_def): + mock = mocker.patch.object(MetadataResourceApiAdapter, "update") + await metadata_client.update_workflow_defs_validated([extended_workflow_def]) + mock.assert_called_with(extended_workflow_def=[extended_workflow_def], overwrite=None, new_version=None) + + +@pytest.mark.asyncio +async def 
test_update_workflow_defs_validated_with_params(mocker, metadata_client, extended_workflow_def): + mock = mocker.patch.object(MetadataResourceApiAdapter, "update") + await metadata_client.update_workflow_defs_validated([extended_workflow_def], overwrite=True, new_version=False) + mock.assert_called_with(extended_workflow_def=[extended_workflow_def], overwrite=True, new_version=False) + + +@pytest.mark.asyncio +async def test_register_workflow_def_validated(mocker, metadata_client, extended_workflow_def): + mock = mocker.patch.object(metadata_client, "create_workflow_def_validated") + await metadata_client.register_workflow_def_validated(extended_workflow_def) + mock.assert_called_with(extended_workflow_def=extended_workflow_def, overwrite=False) + + +@pytest.mark.asyncio +async def test_update_workflow_def_validated(mocker, metadata_client, extended_workflow_def): + mock = mocker.patch.object(metadata_client, "create_workflow_def_validated") + await metadata_client.update_workflow_def_validated(extended_workflow_def) + mock.assert_called_with(extended_workflow_def=extended_workflow_def, overwrite=True) diff --git a/tests/unit/orkes/test_async_prompt_client.py b/tests/unit/orkes/test_async_prompt_client.py index 1d3a3fa4c..43ca3e79a 100644 --- a/tests/unit/orkes/test_async_prompt_client.py +++ b/tests/unit/orkes/test_async_prompt_client.py @@ -61,7 +61,7 @@ def tag(): def test_init(prompt_client): message = "prompt_api is not of type PromptResourceApiAdapter" - assert isinstance(prompt_client.prompt_api, PromptResourceApiAdapter), message + assert isinstance(prompt_client._prompt_api, PromptResourceApiAdapter), message @pytest.mark.asyncio @@ -72,7 +72,7 @@ async def test_save_message_template(mocker, prompt_client): ) assert mock.called mock.assert_called_with( - TEMPLATE_NAME, TEMPLATE_DESCRIPTION, TEMPLATE_BODY, models=[MODEL_NAME] + name=TEMPLATE_NAME, description=TEMPLATE_DESCRIPTION, body=TEMPLATE_BODY, models=[MODEL_NAME] ) @@ -84,7 +84,7 @@ async def test_save_message_template_without_models(mocker, prompt_client): ) assert mock.called mock.assert_called_with( - TEMPLATE_NAME, TEMPLATE_DESCRIPTION, TEMPLATE_BODY, models=None + name=TEMPLATE_NAME, description=TEMPLATE_DESCRIPTION, body=TEMPLATE_BODY, models=None ) @@ -97,7 +97,7 @@ async def test_get_message_template(mocker, prompt_client, message_template): ) result = await prompt_client.get_message_template(TEMPLATE_NAME) assert mock.called - mock.assert_called_with(TEMPLATE_NAME) + mock.assert_called_with(name=TEMPLATE_NAME) assert result == message_template @@ -119,7 +119,7 @@ async def test_delete_message_template(mocker, prompt_client): mock = mocker.patch.object(PromptResourceApiAdapter, "delete_message_template") await prompt_client.delete_message_template(TEMPLATE_NAME) assert mock.called - mock.assert_called_with(TEMPLATE_NAME) + mock.assert_called_with(name=TEMPLATE_NAME) @pytest.mark.asyncio @@ -127,7 +127,7 @@ async def test_create_message_templates(mocker, prompt_client, message_template) mock = mocker.patch.object(PromptResourceApiAdapter, "create_message_templates") await prompt_client.create_message_templates([message_template]) assert mock.called - mock.assert_called_with([message_template]) + mock.assert_called_with(message_template=[message_template]) @pytest.mark.asyncio @@ -142,7 +142,7 @@ async def test_test_message_template( ) result = await prompt_client.test_message_template(prompt_template_test_request) assert mock.called - mock.assert_called_with(prompt_template_test_request) + 
mock.assert_called_with(prompt_template_test_request=prompt_template_test_request) assert result == expected_result @@ -163,7 +163,7 @@ async def test_get_tags_for_prompt_template(mocker, prompt_client, tag): ) result = await prompt_client.get_tags_for_prompt_template(TEMPLATE_NAME) assert mock.called - mock.assert_called_with(TEMPLATE_NAME) + mock.assert_called_with(name=TEMPLATE_NAME) assert result == [tag] @@ -174,7 +174,7 @@ async def test_delete_tag_for_prompt_template(mocker, prompt_client, tag): ) await prompt_client.delete_tag_for_prompt_template(TEMPLATE_NAME, [tag]) assert mock.called - mock.assert_called_with(TEMPLATE_NAME, [tag]) + mock.assert_called_with(name=TEMPLATE_NAME, tag=[tag]) @pytest.mark.asyncio @@ -185,7 +185,7 @@ async def test_create_simple_template(mocker, prompt_client): ) assert mock.called mock.assert_called_with( - TEMPLATE_NAME, TEMPLATE_DESCRIPTION, TEMPLATE_BODY, models=None + name=TEMPLATE_NAME, description=TEMPLATE_DESCRIPTION, body=TEMPLATE_BODY, models=None ) @@ -197,7 +197,7 @@ async def test_update_template(mocker, prompt_client): ) assert mock.called mock.assert_called_with( - TEMPLATE_NAME, TEMPLATE_DESCRIPTION, TEMPLATE_BODY, models=[MODEL_NAME] + name=TEMPLATE_NAME, description=TEMPLATE_DESCRIPTION, body=TEMPLATE_BODY, models=[MODEL_NAME] ) @@ -210,7 +210,7 @@ async def test_template_exists_true(mocker, prompt_client, message_template): ) result = await prompt_client.template_exists(TEMPLATE_NAME) assert mock.called - mock.assert_called_with(TEMPLATE_NAME) + mock.assert_called_with(name=TEMPLATE_NAME) assert result is True @@ -223,7 +223,7 @@ async def test_template_exists_false(mocker, prompt_client): ) result = await prompt_client.template_exists(TEMPLATE_NAME) assert mock.called - mock.assert_called_with(TEMPLATE_NAME) + mock.assert_called_with(name=TEMPLATE_NAME) assert result is False @@ -278,12 +278,12 @@ async def test_clone_template(mocker, prompt_client, message_template): await prompt_client.clone_template(TEMPLATE_NAME, target_name) assert mock_get_template.called - mock_get_template.assert_called_with(TEMPLATE_NAME) + mock_get_template.assert_called_with(name=TEMPLATE_NAME) assert mock_save_template.called mock_save_template.assert_called_with( - target_name, - f"Clone of {TEMPLATE_DESCRIPTION}", - TEMPLATE_BODY, + name=target_name, + description=f"Clone of {TEMPLATE_DESCRIPTION}", + body=TEMPLATE_BODY, models=None, ) @@ -304,12 +304,12 @@ async def test_clone_template_with_description(mocker, prompt_client, message_te await prompt_client.clone_template(TEMPLATE_NAME, target_name, new_description) assert mock_get_template.called - mock_get_template.assert_called_with(TEMPLATE_NAME) + mock_get_template.assert_called_with(name=TEMPLATE_NAME) assert mock_save_template.called mock_save_template.assert_called_with( - target_name, - new_description, - TEMPLATE_BODY, + name=target_name, + description=new_description, + body=TEMPLATE_BODY, models=None, ) @@ -324,7 +324,7 @@ async def test_bulk_delete_templates(mocker, prompt_client): await prompt_client.bulk_delete_templates(template_names) assert mock_delete.call_count == 3 - expected_calls = [mocker.call(name) for name in template_names] + expected_calls = [mocker.call(name=name) for name in template_names] mock_delete.assert_has_calls(expected_calls) @@ -348,7 +348,7 @@ async def test_save_prompt(mocker, prompt_client): await prompt_client.save_prompt(TEMPLATE_NAME, TEMPLATE_DESCRIPTION, TEMPLATE_BODY) assert mock.called mock.assert_called_with( - TEMPLATE_NAME, TEMPLATE_DESCRIPTION, 
TEMPLATE_BODY, models=None + name=TEMPLATE_NAME, description=TEMPLATE_DESCRIPTION, body=TEMPLATE_BODY, models=None ) @@ -361,7 +361,7 @@ async def test_get_prompt(mocker, prompt_client, message_template): ) result = await prompt_client.get_prompt(TEMPLATE_NAME) assert mock.called - mock.assert_called_with(TEMPLATE_NAME) + mock.assert_called_with(name=TEMPLATE_NAME) assert result == message_template @@ -370,7 +370,7 @@ async def test_delete_prompt(mocker, prompt_client): mock = mocker.patch.object(PromptResourceApiAdapter, "delete_message_template") await prompt_client.delete_prompt(TEMPLATE_NAME) assert mock.called - mock.assert_called_with(TEMPLATE_NAME) + mock.assert_called_with(name=TEMPLATE_NAME) @pytest.mark.asyncio @@ -491,3 +491,11 @@ async def test_get_templates_with_model_no_models_attribute(mocker, prompt_clien assert mock.called mock.assert_called_with() assert len(result) == 0 + + +@pytest.mark.asyncio +async def test_update_tag_for_prompt_template(mocker, prompt_client, tag): + mock = mocker.patch.object(PromptResourceApiAdapter, "put_tag_for_prompt_template") + tags = [tag] + await prompt_client.update_tag_for_prompt_template(TEMPLATE_NAME, tags) + mock.assert_called_with(name=TEMPLATE_NAME, tag=tags) diff --git a/tests/unit/orkes/test_async_scheduler_client.py b/tests/unit/orkes/test_async_scheduler_client.py index 8ebbfbb59..23c8d4bfd 100644 --- a/tests/unit/orkes/test_async_scheduler_client.py +++ b/tests/unit/orkes/test_async_scheduler_client.py @@ -54,7 +54,7 @@ def save_schedule_request(): async def test_init(scheduler_client): message = "scheduler_api is not of type SchedulerResourceApiAdapter" assert isinstance( - scheduler_client.scheduler_api, SchedulerResourceApiAdapter + scheduler_client._scheduler_api, SchedulerResourceApiAdapter ), message @@ -73,7 +73,7 @@ async def test_get_schedule(mocker, scheduler_client, workflow_schedule): schedule = await scheduler_client.get_schedule(SCHEDULE_NAME) assert schedule == workflow_schedule assert mock.called - mock.assert_called_with(SCHEDULE_NAME) + mock.assert_called_with(name=SCHEDULE_NAME) @pytest.mark.asyncio @@ -214,7 +214,7 @@ async def test_put_tag_for_schedule(mocker, scheduler_client): tag2 = TagAdapter(key="tag2", value="val2") tags = [tag1, tag2] await scheduler_client.put_tag_for_schedule(SCHEDULE_NAME, tags) - mock.assert_called_with(SCHEDULE_NAME, tags) + mock.assert_called_with(name=SCHEDULE_NAME, tag=tags) @pytest.mark.asyncio @@ -237,3 +237,45 @@ async def test_delete_tag_for_schedule(mocker, scheduler_client): tags = [tag1, tag2] await scheduler_client.delete_tag_for_schedule(SCHEDULE_NAME, tags) mock.assert_called_with(SCHEDULE_NAME, tags) + + +@pytest.mark.asyncio +async def test_save_schedule_validated(mocker, scheduler_client, save_schedule_request): + mock = mocker.patch.object(SchedulerResourceApiAdapter, "save_schedule") + await scheduler_client.save_schedule_validated(save_schedule_request) + mock.assert_called_with(save_schedule_request=save_schedule_request) + + +@pytest.mark.asyncio +async def test_delete_schedule_validated(mocker, scheduler_client): + mock = mocker.patch.object(SchedulerResourceApiAdapter, "delete_schedule") + await scheduler_client.delete_schedule_validated(SCHEDULE_NAME) + mock.assert_called_with(name=SCHEDULE_NAME) + + +@pytest.mark.asyncio +async def test_pause_schedule_validated(mocker, scheduler_client): + mock = mocker.patch.object(SchedulerResourceApiAdapter, "pause_schedule") + await scheduler_client.pause_schedule_validated(SCHEDULE_NAME) + 
mock.assert_called_with(name=SCHEDULE_NAME) + + +@pytest.mark.asyncio +async def test_resume_schedule_validated(mocker, scheduler_client): + mock = mocker.patch.object(SchedulerResourceApiAdapter, "resume_schedule") + await scheduler_client.resume_schedule_validated(SCHEDULE_NAME) + mock.assert_called_with(name=SCHEDULE_NAME) + + +@pytest.mark.asyncio +async def test_pause_all_schedules_validated(mocker, scheduler_client): + mock = mocker.patch.object(SchedulerResourceApiAdapter, "pause_all_schedules") + await scheduler_client.pause_all_schedules_validated() + assert mock.called + + +@pytest.mark.asyncio +async def test_resume_all_schedules_validated(mocker, scheduler_client): + mock = mocker.patch.object(SchedulerResourceApiAdapter, "resume_all_schedules") + await scheduler_client.resume_all_schedules_validated() + assert mock.called diff --git a/tests/unit/orkes/test_async_schema_client.py b/tests/unit/orkes/test_async_schema_client.py index a4512c750..4e4cbf631 100644 --- a/tests/unit/orkes/test_async_schema_client.py +++ b/tests/unit/orkes/test_async_schema_client.py @@ -40,7 +40,7 @@ def schema_def_adapter(): @pytest.mark.asyncio async def test_init(schema_client): message = "schema_api is not of type SchemaResourceApiAdapter" - assert isinstance(schema_client.schema_api, SchemaResourceApiAdapter), message + assert isinstance(schema_client._schema_api, SchemaResourceApiAdapter), message @pytest.mark.asyncio @@ -70,7 +70,7 @@ async def test_get_schema(mocker, schema_client, schema_def_adapter): mock = mocker.patch.object(SchemaResourceApiAdapter, "get_schema_by_name_and_version") mock.return_value = schema_def_adapter result = await schema_client.get_schema(SCHEMA_NAME, SCHEMA_VERSION) - mock.assert_called_with(SCHEMA_NAME, SCHEMA_VERSION) + mock.assert_called_with(name=SCHEMA_NAME, version=SCHEMA_VERSION) assert result == schema_def_adapter @@ -88,14 +88,14 @@ async def test_get_all_schemas(mocker, schema_client, schema_def_adapter): async def test_delete_schema_by_name_and_version(mocker, schema_client): mock = mocker.patch.object(SchemaResourceApiAdapter, "delete_schema_by_name_and_version") await schema_client.delete_schema_by_name_and_version(SCHEMA_NAME, SCHEMA_VERSION) - mock.assert_called_with(SCHEMA_NAME, SCHEMA_VERSION) + mock.assert_called_with(name=SCHEMA_NAME, version=SCHEMA_VERSION) @pytest.mark.asyncio async def test_delete_schema_by_name(mocker, schema_client): mock = mocker.patch.object(SchemaResourceApiAdapter, "delete_schema_by_name") await schema_client.delete_schema_by_name(SCHEMA_NAME) - mock.assert_called_with(SCHEMA_NAME) + mock.assert_called_with(name=SCHEMA_NAME) @pytest.mark.asyncio @@ -186,7 +186,7 @@ async def test_get_unique_schema_names(mocker, schema_client): async def test_delete_all_schema_versions(mocker, schema_client): mock = mocker.patch.object(schema_client, "delete_schema_by_name") await schema_client.delete_all_schema_versions(SCHEMA_NAME) - mock.assert_called_with(SCHEMA_NAME) + mock.assert_called_with(name=SCHEMA_NAME) @pytest.mark.asyncio @@ -229,14 +229,14 @@ async def test_list_schemas(mocker, schema_client): async def test_delete_schema_with_version(mocker, schema_client): mock = mocker.patch.object(schema_client, "delete_schema_by_name_and_version") await schema_client.delete_schema(SCHEMA_NAME, SCHEMA_VERSION) - mock.assert_called_with(SCHEMA_NAME, SCHEMA_VERSION) + mock.assert_called_with(name=SCHEMA_NAME, version=SCHEMA_VERSION) @pytest.mark.asyncio async def test_delete_schema_without_version(mocker, schema_client): mock = 
mocker.patch.object(schema_client, "delete_schema_by_name") await schema_client.delete_schema(SCHEMA_NAME) - mock.assert_called_with(SCHEMA_NAME) + mock.assert_called_with(name=SCHEMA_NAME) @pytest.mark.asyncio @@ -246,7 +246,7 @@ async def test_create_schema_version(mocker, schema_client): mock_versions.return_value = [1, 2, 3] schema_definition = {"type": "object", "properties": {"name": {"type": "string"}}} await schema_client.create_schema_version(SCHEMA_NAME, schema_definition, "New version") - mock_create.assert_called_with(SCHEMA_NAME, 4, schema_definition, "New version") + mock_create.assert_called_with(name=SCHEMA_NAME, version=4, schema_definition=schema_definition, schema_type="New version") @pytest.mark.asyncio @@ -256,7 +256,7 @@ async def test_create_schema_version_first_version(mocker, schema_client): mock_versions.return_value = [] schema_definition = {"type": "object", "properties": {"name": {"type": "string"}}} await schema_client.create_schema_version(SCHEMA_NAME, schema_definition, "First version") - mock_create.assert_called_with(SCHEMA_NAME, 1, schema_definition, "First version") + mock_create.assert_called_with(name=SCHEMA_NAME, version=1, schema_definition=schema_definition, schema_type="First version") @pytest.mark.asyncio @@ -265,7 +265,7 @@ async def test_get_schema_api_exception(mocker, schema_client): mock.side_effect = ApiException(status=404, body="Schema not found") with pytest.raises(ApiException): await schema_client.get_schema(SCHEMA_NAME, SCHEMA_VERSION) - mock.assert_called_with(SCHEMA_NAME, SCHEMA_VERSION) + mock.assert_called_with(name=SCHEMA_NAME, version=SCHEMA_VERSION) @pytest.mark.asyncio @@ -283,7 +283,7 @@ async def test_delete_schema_api_exception(mocker, schema_client): mock.side_effect = ApiException(status=404, body="Schema not found") with pytest.raises(ApiException): await schema_client.delete_schema_by_name_and_version(SCHEMA_NAME, SCHEMA_VERSION) - mock.assert_called_with(SCHEMA_NAME, SCHEMA_VERSION) + mock.assert_called_with(name=SCHEMA_NAME, version=SCHEMA_VERSION) @pytest.mark.asyncio @@ -305,3 +305,33 @@ async def test_search_schemas_by_name_case_insensitive(mocker, schema_client): mock.return_value = schemas result = await schema_client.search_schemas_by_name("user") assert result == [schemas[0]] + + +@pytest.mark.asyncio +async def test_register_schema(mocker, schema_client, schema_def_adapter): + mock = mocker.patch.object(schema_client, "register_schemas") + await schema_client.register_schema(schema_def_adapter) + mock.assert_called_with(schema_defs=[schema_def_adapter], new_version=None) + + +@pytest.mark.asyncio +async def test_register_schema_with_new_version(mocker, schema_client, schema_def_adapter): + mock = mocker.patch.object(schema_client, "register_schemas") + await schema_client.register_schema(schema_def_adapter, new_version=True) + mock.assert_called_with(schema_defs=[schema_def_adapter], new_version=True) + + +@pytest.mark.asyncio +async def test_register_schemas(mocker, schema_client, schema_def_adapter): + mock = mocker.patch.object(SchemaResourceApiAdapter, "save") + schemas = [schema_def_adapter] + await schema_client.register_schemas(schemas) + mock.assert_called_with(schema_def=schemas, new_version=None) + + +@pytest.mark.asyncio +async def test_register_schemas_with_new_version(mocker, schema_client, schema_def_adapter): + mock = mocker.patch.object(SchemaResourceApiAdapter, "save") + schemas = [schema_def_adapter] + await schema_client.register_schemas(schemas, new_version=True) + 
mock.assert_called_with(schema_def=schemas, new_version=True) diff --git a/tests/unit/orkes/test_async_secret_client.py b/tests/unit/orkes/test_async_secret_client.py index e5909e3cf..241e8f2cd 100644 --- a/tests/unit/orkes/test_async_secret_client.py +++ b/tests/unit/orkes/test_async_secret_client.py @@ -53,7 +53,7 @@ def extended_secret(): @pytest.mark.asyncio async def test_init(secret_client): message = "secret_api is not of type SecretResourceApiAdapter" - assert isinstance(secret_client.secret_api, SecretResourceApiAdapter), message + assert isinstance(secret_client._secret_api, SecretResourceApiAdapter), message @pytest.mark.asyncio @@ -70,7 +70,7 @@ async def test_get_secret(mocker, secret_client): mock = mocker.patch.object(SecretResourceApiAdapter, "get_secret") mock.return_value = SECRET_VALUE result = await secret_client.get_secret(SECRET_KEY) - mock.assert_called_with(SECRET_KEY) + mock.assert_called_with(key=SECRET_KEY) assert result == SECRET_VALUE @@ -144,7 +144,7 @@ async def test_list_secrets_with_tags_that_user_can_grant_access_to( async def test_put_tag_for_secret(mocker, secret_client, tag_list): mock = mocker.patch.object(SecretResourceApiAdapter, "put_tag_for_secret") await secret_client.put_tag_for_secret(SECRET_KEY, tag_list) - mock.assert_called_with(SECRET_KEY, tag_list) + mock.assert_called_with(key=SECRET_KEY, tag=tag_list) @pytest.mark.asyncio @@ -152,7 +152,7 @@ async def test_get_tags(mocker, secret_client, tag_list): mock = mocker.patch.object(SecretResourceApiAdapter, "get_tags") mock.return_value = tag_list result = await secret_client.get_tags(SECRET_KEY) - mock.assert_called_with(SECRET_KEY) + mock.assert_called_with(key=SECRET_KEY) assert result == tag_list @@ -160,7 +160,7 @@ async def test_get_tags(mocker, secret_client, tag_list): async def test_delete_tag_for_secret(mocker, secret_client, tag_list): mock = mocker.patch.object(SecretResourceApiAdapter, "delete_tag_for_secret") await secret_client.delete_tag_for_secret(SECRET_KEY, tag_list) - mock.assert_called_with(SECRET_KEY, tag_list) + mock.assert_called_with(key=SECRET_KEY, tag=tag_list) @pytest.mark.asyncio @@ -215,7 +215,7 @@ async def test_get_secret_api_exception(mocker, secret_client): mock.side_effect = ApiException(status=404, body=ERROR_BODY) with pytest.raises(ApiException): await secret_client.get_secret(SECRET_KEY) - mock.assert_called_with(SECRET_KEY) + mock.assert_called_with(key=SECRET_KEY) @pytest.mark.asyncio @@ -242,7 +242,7 @@ async def test_put_tag_for_secret_api_exception(mocker, secret_client, tag_list) mock.side_effect = ApiException(status=400, body="Bad request") with pytest.raises(ApiException): await secret_client.put_tag_for_secret(SECRET_KEY, tag_list) - mock.assert_called_with(SECRET_KEY, tag_list) + mock.assert_called_with(key=SECRET_KEY, tag=tag_list) @pytest.mark.asyncio @@ -251,7 +251,7 @@ async def test_get_tags_api_exception(mocker, secret_client): mock.side_effect = ApiException(status=404, body=ERROR_BODY) with pytest.raises(ApiException): await secret_client.get_tags(SECRET_KEY) - mock.assert_called_with(SECRET_KEY) + mock.assert_called_with(key=SECRET_KEY) @pytest.mark.asyncio @@ -260,7 +260,7 @@ async def test_delete_tag_for_secret_api_exception(mocker, secret_client, tag_li mock.side_effect = ApiException(status=400, body="Bad request") with pytest.raises(ApiException): await secret_client.delete_tag_for_secret(SECRET_KEY, tag_list) - mock.assert_called_with(SECRET_KEY, tag_list) + mock.assert_called_with(key=SECRET_KEY, tag=tag_list) @pytest.mark.asyncio 
@@ -303,7 +303,7 @@ async def test_get_secret_empty_list(mocker, secret_client): async def test_put_tag_for_secret_empty_tags(mocker, secret_client): mock = mocker.patch.object(SecretResourceApiAdapter, "put_tag_for_secret") await secret_client.put_tag_for_secret(SECRET_KEY, []) - mock.assert_called_with(SECRET_KEY, []) + mock.assert_called_with(key=SECRET_KEY, tag=[]) @pytest.mark.asyncio @@ -345,7 +345,7 @@ async def test_get_tags_with_multiple_tags(mocker, secret_client): ] mock.return_value = multiple_tags result = await secret_client.get_tags(SECRET_KEY) - mock.assert_called_with(SECRET_KEY) + mock.assert_called_with(key=SECRET_KEY) assert result == multiple_tags @@ -353,14 +353,14 @@ async def test_get_tags_with_multiple_tags(mocker, secret_client): async def test_put_tag_for_secret_single_tag(mocker, secret_client, tag_adapter): mock = mocker.patch.object(SecretResourceApiAdapter, "put_tag_for_secret") await secret_client.put_tag_for_secret(SECRET_KEY, [tag_adapter]) - mock.assert_called_with(SECRET_KEY, [tag_adapter]) + mock.assert_called_with(key=SECRET_KEY, tag=[tag_adapter]) @pytest.mark.asyncio async def test_delete_tag_for_secret_single_tag(mocker, secret_client, tag_adapter): mock = mocker.patch.object(SecretResourceApiAdapter, "delete_tag_for_secret") await secret_client.delete_tag_for_secret(SECRET_KEY, [tag_adapter]) - mock.assert_called_with(SECRET_KEY, [tag_adapter]) + mock.assert_called_with(key=SECRET_KEY, tag=[tag_adapter]) @pytest.mark.asyncio @@ -385,3 +385,74 @@ async def test_list_secrets_with_tags_that_user_can_grant_access_to_empty( result = await secret_client.list_secrets_with_tags_that_user_can_grant_access_to() assert mock.called assert result == [] + + +@pytest.mark.asyncio +async def test_put_secret_validated(mocker, secret_client): + mock = mocker.patch.object(SecretResourceApiAdapter, "put_secret") + await secret_client.put_secret_validated(SECRET_KEY, SECRET_VALUE) + mock.assert_called_with(key=SECRET_KEY, body=SECRET_VALUE) + + +@pytest.mark.asyncio +async def test_delete_secret_validated(mocker, secret_client): + mock = mocker.patch.object(SecretResourceApiAdapter, "delete_secret") + await secret_client.delete_secret_validated(SECRET_KEY) + mock.assert_called_with(key=SECRET_KEY) + + +@pytest.mark.asyncio +async def test_secret_exists_validated_true(mocker, secret_client): + mock = mocker.patch.object(SecretResourceApiAdapter, "secret_exists") + mock.return_value = True + result = await secret_client.secret_exists_validated(SECRET_KEY) + mock.assert_called_with(key=SECRET_KEY) + assert result is True + + +@pytest.mark.asyncio +async def test_secret_exists_validated_false(mocker, secret_client): + mock = mocker.patch.object(SecretResourceApiAdapter, "secret_exists") + mock.return_value = False + result = await secret_client.secret_exists_validated(SECRET_KEY) + mock.assert_called_with(key=SECRET_KEY) + assert result is False + + +@pytest.mark.asyncio +async def test_clear_local_cache_validated(mocker, secret_client): + mock = mocker.patch.object(SecretResourceApiAdapter, "clear_local_cache") + await secret_client.clear_local_cache_validated() + assert mock.called + + +@pytest.mark.asyncio +async def test_clear_redis_cache_validated(mocker, secret_client): + mock = mocker.patch.object(SecretResourceApiAdapter, "clear_redis_cache") + await secret_client.clear_redis_cache_validated() + assert mock.called + + +@pytest.mark.asyncio +async def test_update_secret_validated(mocker, secret_client): + mock = mocker.patch.object(secret_client, 
"put_secret_validated") + await secret_client.update_secret_validated(SECRET_KEY, SECRET_VALUE) + mock.assert_called_with(key=SECRET_KEY, secret=SECRET_VALUE) + + +@pytest.mark.asyncio +async def test_has_secret_validated_true(mocker, secret_client): + mock = mocker.patch.object(secret_client, "secret_exists_validated") + mock.return_value = True + result = await secret_client.has_secret_validated(SECRET_KEY) + mock.assert_called_with(key=SECRET_KEY) + assert result is True + + +@pytest.mark.asyncio +async def test_has_secret_validated_false(mocker, secret_client): + mock = mocker.patch.object(secret_client, "secret_exists_validated") + mock.return_value = False + result = await secret_client.has_secret_validated(SECRET_KEY) + mock.assert_called_with(key=SECRET_KEY) + assert result is False diff --git a/tests/unit/orkes/test_async_task_client.py b/tests/unit/orkes/test_async_task_client.py index 3040b9a4e..5b3a83743 100644 --- a/tests/unit/orkes/test_async_task_client.py +++ b/tests/unit/orkes/test_async_task_client.py @@ -76,7 +76,7 @@ def poll_data_adapter(): @pytest.mark.asyncio async def test_init(task_client): message = "task_api is not of type TaskResourceApiAdapter" - assert isinstance(task_client.task_api, TaskResourceApiAdapter), message + assert isinstance(task_client._task_api, TaskResourceApiAdapter), message @pytest.mark.asyncio @@ -482,3 +482,51 @@ async def test_get_all_poll_data_with_parameters(mocker, task_client): last_poll_time_opt="desc" ) assert result == expected_data + + +@pytest.mark.asyncio +async def test_poll_task(mocker, task_client, task_adapter): + mock = mocker.patch.object(TaskResourceApiAdapter, "poll") + mock.return_value = task_adapter + result = await task_client.poll_task(TASK_NAME, WORKER_ID) + mock.assert_called_with(tasktype=TASK_NAME, workerid=WORKER_ID, domain=None) + assert result == task_adapter + + +@pytest.mark.asyncio +async def test_poll_task_with_domain(mocker, task_client, task_adapter): + mock = mocker.patch.object(TaskResourceApiAdapter, "poll") + mock.return_value = task_adapter + result = await task_client.poll_task(TASK_NAME, WORKER_ID, DOMAIN) + mock.assert_called_with(tasktype=TASK_NAME, workerid=WORKER_ID, domain=DOMAIN) + assert result == task_adapter + + +@pytest.mark.asyncio +async def test_batch_poll_tasks(mocker, task_client, task_adapter): + mock = mocker.patch.object(TaskResourceApiAdapter, "batch_poll") + mock.return_value = [task_adapter] + result = await task_client.batch_poll_tasks(TASK_NAME, WORKER_ID, 3, 200) + mock.assert_called_with( + tasktype=TASK_NAME, + workerid=WORKER_ID, + count=3, + timeout=200, + domain=None + ) + assert result == [task_adapter] + + +@pytest.mark.asyncio +async def test_batch_poll_tasks_with_domain(mocker, task_client, task_adapter): + mock = mocker.patch.object(TaskResourceApiAdapter, "batch_poll") + mock.return_value = [task_adapter] + result = await task_client.batch_poll_tasks(TASK_NAME, WORKER_ID, 3, 200, DOMAIN) + mock.assert_called_with( + tasktype=TASK_NAME, + workerid=WORKER_ID, + count=3, + timeout=200, + domain=DOMAIN + ) + assert result == [task_adapter] diff --git a/tests/unit/orkes/test_async_workflow_client.py b/tests/unit/orkes/test_async_workflow_client.py index 2d668bf18..a464b43d7 100644 --- a/tests/unit/orkes/test_async_workflow_client.py +++ b/tests/unit/orkes/test_async_workflow_client.py @@ -44,7 +44,7 @@ def workflow_input(): @pytest.mark.asyncio async def test_init(workflow_client): message = "workflowResourceApi is not of type WorkflowResourceApiAdapter" - assert 
isinstance(workflow_client.workflow_api, WorkflowResourceApiAdapter), message + assert isinstance(workflow_client._workflow_api, WorkflowResourceApiAdapter), message @pytest.mark.asyncio diff --git a/tests/unit/orkes/test_authorization_client.py b/tests/unit/orkes/test_authorization_client.py index b6913449a..2f1fc5905 100644 --- a/tests/unit/orkes/test_authorization_client.py +++ b/tests/unit/orkes/test_authorization_client.py @@ -138,13 +138,13 @@ def disable_logging(): def test_init(authorization_client): message = "applicationResourceApi is not of type ApplicationResourceApi" - assert isinstance(authorization_client.applicationResourceApi, ApplicationResourceApi), message + assert isinstance(authorization_client._application_api, ApplicationResourceApi), message message = "userResourceApi is not of type UserResourceApi" - assert isinstance(authorization_client.userResourceApi, UserResourceApi), message + assert isinstance(authorization_client._user_api, UserResourceApi), message message = "groupResourceApi is not of type GroupResourceApi" - assert isinstance(authorization_client.groupResourceApi, GroupResourceApi), message + assert isinstance(authorization_client._group_api, GroupResourceApi), message message = "authorizationResourceApi is not of type AuthorizationResourceApi" - assert isinstance(authorization_client.authorizationResourceApi, AuthorizationResourceApi), ( + assert isinstance(authorization_client._authorization_api, AuthorizationResourceApi), ( message ) @@ -161,7 +161,7 @@ def test_create_application(mocker, authorization_client, conductor_application) "updateTime": 1699236095031, } app = authorization_client.create_application(createReq) - mock.assert_called_with(createReq) + mock.assert_called_with(body=createReq) assert app == conductor_application @@ -177,7 +177,7 @@ def test_get_application(mocker, authorization_client, conductor_application): "updateTime": 1699236095031, } app = authorization_client.get_application(APP_ID) - mock.assert_called_with(APP_ID) + mock.assert_called_with(id=APP_ID) assert app == conductor_application @@ -192,7 +192,7 @@ def test_list_applications(mocker, authorization_client, conductor_application): def test_delete_application(mocker, authorization_client): mock = mocker.patch.object(ApplicationResourceApi, "delete_application") authorization_client.delete_application(APP_ID) - mock.assert_called_with(APP_ID) + mock.assert_called_with(id=APP_ID) def test_update_application(mocker, authorization_client, conductor_application): @@ -206,21 +206,21 @@ def test_update_application(mocker, authorization_client, conductor_application) "createTime": 1699236095031, "updateTime": 1699236095031, } - app = authorization_client.update_application(APP_ID, updateReq) + app = authorization_client.update_application(updateReq, APP_ID) assert app == conductor_application - mock.assert_called_with(APP_ID, updateReq) + mock.assert_called_with(id=APP_ID, body=updateReq) def test_add_role_to_application_user(mocker, authorization_client): mock = mocker.patch.object(ApplicationResourceApi, "add_role_to_application_user") authorization_client.add_role_to_application_user(APP_ID, "USER") - mock.assert_called_with(APP_ID, "USER") + mock.assert_called_with(application_id=APP_ID, role="USER") def test_remove_role_from_application_user(mocker, authorization_client): mock = mocker.patch.object(ApplicationResourceApi, "remove_role_from_application_user") authorization_client.remove_role_from_application_user(APP_ID, "USER") - mock.assert_called_with(APP_ID, "USER") + 
mock.assert_called_with(application_id=APP_ID, role="USER") def test_set_application_tags(mocker, authorization_client, conductor_application): @@ -229,7 +229,7 @@ def test_set_application_tags(mocker, authorization_client, conductor_applicatio tag2 = MetadataTag("tag2", "val2") tags = [tag1, tag2] authorization_client.set_application_tags(tags, APP_ID) - mock.assert_called_with(tags, APP_ID) + mock.assert_called_with(tag=tags, id=APP_ID) def test_get_application_tags(mocker, authorization_client, conductor_application): @@ -239,7 +239,7 @@ def test_get_application_tags(mocker, authorization_client, conductor_applicatio tag2 = MetadataTag("tag2", "val2") mock.return_value = [tag1, tag2] tags = authorization_client.get_application_tags(APP_ID) - mock.assert_called_with(APP_ID) + mock.assert_called_with(application_id=APP_ID) assert len(tags) == expected_application_tags_len @@ -249,7 +249,7 @@ def test_delete_application_tags(mocker, authorization_client, conductor_applica tag2 = MetadataTag("tag2", "val2") tags = [tag1, tag2] authorization_client.delete_application_tags(tags, APP_ID) - mock.assert_called_with(tags, APP_ID) + mock.assert_called_with(tag=tags, id=APP_ID) def test_create_access_key(mocker, authorization_client, access_key): @@ -259,7 +259,7 @@ def test_create_access_key(mocker, authorization_client, access_key): "secret": ACCESS_KEY_SECRET, } created_key = authorization_client.create_access_key(APP_ID) - mock.assert_called_with(APP_ID) + mock.assert_called_with(id=APP_ID) assert created_key == access_key @@ -278,7 +278,7 @@ def test_get_access_keys(mocker, authorization_client, app_keys): }, ] access_keys = authorization_client.get_access_keys(APP_ID) - mock.assert_called_with(APP_ID) + mock.assert_called_with(id=APP_ID) assert access_keys == app_keys @@ -290,22 +290,22 @@ def test_toggle_access_key_status(mocker, authorization_client, access_key): "status": "INACTIVE", } access_key = authorization_client.toggle_access_key_status(APP_ID, ACCESS_KEY_ID) - mock.assert_called_with(APP_ID, ACCESS_KEY_ID) + mock.assert_called_with(application_id=APP_ID, key_id=ACCESS_KEY_ID) assert access_key.status == AccessKeyStatus.INACTIVE def test_delete_access_key(mocker, authorization_client): mock = mocker.patch.object(ApplicationResourceApi, "delete_access_key") authorization_client.delete_access_key(APP_ID, ACCESS_KEY_ID) - mock.assert_called_with(APP_ID, ACCESS_KEY_ID) + mock.assert_called_with(application_id=APP_ID, key_id=ACCESS_KEY_ID) def test_upsert_user(mocker, authorization_client, conductor_user, roles): mock = mocker.patch.object(UserResourceApi, "upsert_user") upsertReq = UpsertUserRequest(USER_NAME, ["ADMIN"]) mock.return_value = conductor_user.to_dict() - user = authorization_client.upsert_user(USER_ID, upsertReq) - mock.assert_called_with(USER_ID, upsertReq) + user = authorization_client.upsert_user(upsertReq, USER_ID) + mock.assert_called_with(id=USER_ID, upsert_user_request=upsertReq) assert user.name == USER_NAME assert user.id == USER_ID assert user.uuid == USER_UUID @@ -317,7 +317,7 @@ def test_upsert_user_with_empty_string(mocker, authorization_client, conductor_u mock = mocker.patch.object(UserResourceApi, "upsert_user") upsert_req = UpsertUserRequest(USER_NAME, ["ADMIN"]) mock.return_value = conductor_user.to_dict() - authorization_client.upsert_user(upsert_req, "") + authorization_client.upsert_user(upsert_req,"") mock.assert_called_with(id=None, body=upsert_req) @@ -325,7 +325,7 @@ def test_get_user(mocker, authorization_client, conductor_user, roles): mock = 
mocker.patch.object(UserResourceApi, "get_user") mock.return_value = conductor_user.to_dict() user = authorization_client.get_user(USER_ID) - mock.assert_called_with(USER_ID) + mock.assert_called_with(id=USER_ID) assert user.name == USER_NAME assert user.id == USER_ID assert user.uuid == USER_UUID @@ -359,7 +359,7 @@ def test_list_users(mocker, authorization_client, conductor_user): def test_delete_user(mocker, authorization_client): mock = mocker.patch.object(UserResourceApi, "delete_user") authorization_client.delete_user(USER_ID) - mock.assert_called_with(USER_ID) + mock.assert_called_with(id=USER_ID) def test_delete_user_with_empty_string(mocker, authorization_client): @@ -373,8 +373,8 @@ def test_upsert_group(mocker, authorization_client, conductor_group, group_roles mock = mocker.patch.object(GroupResourceApi, "upsert_group") upsertReq = UpsertGroupRequest(GROUP_NAME, ["USER"]) mock.return_value = conductor_group.to_dict() - group = authorization_client.upsert_group(GROUP_ID, upsertReq) - mock.assert_called_with(GROUP_ID, upsertReq) + group = authorization_client.upsert_group(upsertReq, GROUP_ID) + mock.assert_called_with(id=GROUP_ID, body=upsertReq) assert group == conductor_group assert group.description == GROUP_NAME assert group.id == GROUP_ID @@ -385,7 +385,7 @@ def test_get_group(mocker, authorization_client, conductor_group, group_roles): mock = mocker.patch.object(GroupResourceApi, "get_group") mock.return_value = conductor_group.to_dict() group = authorization_client.get_group(GROUP_ID) - mock.assert_called_with(GROUP_ID) + mock.assert_called_with(id=GROUP_ID) assert group == conductor_group assert group.description == GROUP_NAME assert group.id == GROUP_ID @@ -403,21 +403,21 @@ def test_list_groups(mocker, authorization_client, conductor_group): def test_delete_group(mocker, authorization_client): mock = mocker.patch.object(GroupResourceApi, "delete_group") authorization_client.delete_group(GROUP_ID) - mock.assert_called_with(GROUP_ID) + mock.assert_called_with(id=GROUP_ID) def test_add_user_to_group(mocker, authorization_client, conductor_group): mock = mocker.patch.object(GroupResourceApi, "add_user_to_group") mock.return_value = conductor_group authorization_client.add_user_to_group(GROUP_ID, USER_ID) - mock.assert_called_with(GROUP_ID, USER_ID) + mock.assert_called_with(group_id=GROUP_ID, user_id=USER_ID) def test_get_users_in_group(mocker, authorization_client, conductor_user, roles): mock = mocker.patch.object(GroupResourceApi, "get_users_in_group") mock.return_value = [conductor_user.to_dict()] users = authorization_client.get_users_in_group(GROUP_ID) - mock.assert_called_with(GROUP_ID) + mock.assert_called_with(id=GROUP_ID) assert len(users) == 1 assert users[0].name == USER_NAME assert users[0].id == USER_ID @@ -428,7 +428,7 @@ def test_get_users_in_group(mocker, authorization_client, conductor_user, roles) def test_remove_user_from_group(mocker, authorization_client): mock = mocker.patch.object(GroupResourceApi, "remove_user_from_group") authorization_client.remove_user_from_group(GROUP_ID, USER_ID) - mock.assert_called_with(GROUP_ID, USER_ID) + mock.assert_called_with(group_id=GROUP_ID, user_id=USER_ID) def test_get_granted_permissions_for_group(mocker, authorization_client): @@ -443,7 +443,7 @@ def test_get_granted_permissions_for_group(mocker, authorization_client): ) perms = authorization_client.get_granted_permissions_for_group(GROUP_ID) - mock.assert_called_with(GROUP_ID) + mock.assert_called_with(group_id=GROUP_ID) expected_perm = GrantedPermission( 
target=TargetRef(WF_NAME, TargetType.WORKFLOW_DEF.value), access=["EXECUTE", "UPDATE", "READ"], @@ -469,7 +469,7 @@ def test_get_granted_permissions_for_user(mocker, authorization_client): ] } perms = authorization_client.get_granted_permissions_for_user(USER_ID) - mock.assert_called_with(USER_ID) + mock.assert_called_with(user_id=USER_ID) expected_perm = GrantedPermission( target=TargetRef(id=WF_NAME, type=TargetType.WORKFLOW_DEF.value), access=["EXECUTE", "UPDATE", "READ"], @@ -497,7 +497,7 @@ def test_get_permissions(mocker, authorization_client): ], } permissions = authorization_client.get_permissions(TargetRef(WF_NAME, TargetType.WORKFLOW_DEF)) - mock.assert_called_with(TargetType.WORKFLOW_DEF.name, "workflow_name") + mock.assert_called_with(type=TargetType.WORKFLOW_DEF.name, id="workflow_name") expected_permissions_dict = { AccessType.EXECUTE.name: [ SubjectRef(USER_ID, SubjectType.USER), @@ -516,7 +516,7 @@ def test_grant_permissions(mocker, authorization_client): target = TargetRef(WF_NAME, TargetType.WORKFLOW_DEF) access = [AccessType.READ, AccessType.EXECUTE] authorization_client.grant_permissions(subject, target, access) - mock.assert_called_with(AuthorizationRequest(subject, target, access)) + mock.assert_called_with(body=AuthorizationRequest(subject, target, access)) def test_remove_permissions(mocker, authorization_client): @@ -525,7 +525,7 @@ def test_remove_permissions(mocker, authorization_client): target = TargetRef(WF_NAME, TargetType.WORKFLOW_DEF) access = [AccessType.READ, AccessType.EXECUTE] authorization_client.remove_permissions(subject, target, access) - mock.assert_called_with(AuthorizationRequest(subject, target, access)) + mock.assert_called_with(body=AuthorizationRequest(subject, target, access)) def test_create_access_key_empty_string_converts_to_none(mocker, authorization_client): @@ -637,3 +637,18 @@ def test_delete_tag_for_application_empty_strings_convert_to_none(mocker, author mock = mocker.patch.object(ApplicationResourceApi, "delete_tag_for_application") authorization_client.delete_application_tags([], "") mock.assert_called_with(None, None) + + +def test_check_permissions(mocker, authorization_client): + mock = mocker.patch.object(UserResourceApi, "check_permissions") + permissions_result = { + "READ": True, + "EXECUTE": False, + "UPDATE": True, + } + mock.return_value = permissions_result + result = authorization_client.check_permissions(USER_ID, "WORKFLOW_DEF", WF_NAME) + mock.assert_called_with(user_id=USER_ID, type="WORKFLOW_DEF", id=WF_NAME) + assert result["READ"] is True + assert result["EXECUTE"] is False + assert result["UPDATE"] is True diff --git a/tests/unit/orkes/test_event_client.py b/tests/unit/orkes/test_event_client.py new file mode 100644 index 000000000..db5da1053 --- /dev/null +++ b/tests/unit/orkes/test_event_client.py @@ -0,0 +1,230 @@ +import logging +import pytest + +from conductor.client.configuration.configuration import Configuration +from conductor.client.http.api.event_resource_api import EventResourceApi +from conductor.client.adapters.models.event_handler_adapter import EventHandlerAdapter +from conductor.client.adapters.models.tag_adapter import TagAdapter +from conductor.client.http.models.connectivity_test_input import ConnectivityTestInput +from conductor.client.http.models.connectivity_test_result import ConnectivityTestResult +from conductor.client.orkes.orkes_event_client import OrkesEventClient + +EVENT_NAME = "workflow:completed" +HANDLER_NAME = "test_handler" +QUEUE_TYPE = "kafka" +QUEUE_NAME = "test_queue" + + 
+@pytest.fixture(scope="module") +def event_client(): + configuration = Configuration("http://localhost:8080/api") + return OrkesEventClient(configuration) + + +@pytest.fixture(scope="module") +def event_handler(): + return EventHandlerAdapter( + name=HANDLER_NAME, + event=EVENT_NAME, + active=True, + ) + + +@pytest.fixture(scope="module") +def tag_list(): + return [ + TagAdapter(key="env", value="prod"), + TagAdapter(key="team", value="platform"), + ] + + +@pytest.fixture(autouse=True) +def disable_logging(): + logging.disable(logging.CRITICAL) + yield + logging.disable(logging.NOTSET) + + +def test_init(event_client): + message = "event_api is not of type EventResourceApi" + assert isinstance(event_client._event_api, EventResourceApi), message + + +def test_create_event_handler(mocker, event_client, event_handler): + mock = mocker.patch.object(EventResourceApi, "add_event_handler") + event_client.create_event_handler([event_handler]) + mock.assert_called_with([event_handler]) + + +def test_get_event_handler(mocker, event_client, event_handler): + mock = mocker.patch.object(EventResourceApi, "get_event_handler_by_name") + mock.return_value = event_handler + result = event_client.get_event_handler(HANDLER_NAME) + mock.assert_called_with(name=HANDLER_NAME) + assert result == event_handler + + +def test_list_event_handlers(mocker, event_client, event_handler): + mock = mocker.patch.object(EventResourceApi, "get_event_handlers") + mock.return_value = [event_handler] + result = event_client.list_event_handlers() + assert mock.called + assert result == [event_handler] + + +def test_list_event_handlers_for_event(mocker, event_client, event_handler): + mock = mocker.patch.object(EventResourceApi, "get_event_handlers_for_event") + mock.return_value = [event_handler] + result = event_client.list_event_handlers_for_event(EVENT_NAME) + mock.assert_called_with(event=EVENT_NAME) + assert result == [event_handler] + + +def test_update_event_handler(mocker, event_client, event_handler): + mock = mocker.patch.object(EventResourceApi, "update_event_handler") + event_client.update_event_handler(event_handler) + mock.assert_called_with(event_handler) + + +def test_delete_event_handler(mocker, event_client): + mock = mocker.patch.object(EventResourceApi, "remove_event_handler_status") + event_client.delete_event_handler(HANDLER_NAME) + mock.assert_called_with(name=HANDLER_NAME) + + +def test_get_event_handler_tags(mocker, event_client, tag_list): + mock = mocker.patch.object(EventResourceApi, "get_tags_for_event_handler") + mock.return_value = tag_list + result = event_client.get_event_handler_tags(HANDLER_NAME) + mock.assert_called_with(name=HANDLER_NAME) + assert result == tag_list + + +def test_add_event_handler_tag(mocker, event_client, tag_list): + mock = mocker.patch.object(EventResourceApi, "put_tag_for_event_handler") + event_client.add_event_handler_tag(HANDLER_NAME, tag_list) + mock.assert_called_with(tag_list, HANDLER_NAME) + + +def test_remove_event_handler_tag(mocker, event_client, tag_list): + mock = mocker.patch.object(EventResourceApi, "delete_tag_for_event_handler") + event_client.remove_event_handler_tag(HANDLER_NAME, tag_list) + mock.assert_called_with(tag_list, HANDLER_NAME) + + +def test_get_queue_configuration(mocker, event_client): + mock = mocker.patch.object(EventResourceApi, "get_queue_config") + config = {"bootstrapServers": "localhost:9092", "topic": "workflow_events"} + mock.return_value = config + result = event_client.get_queue_configuration(QUEUE_TYPE, QUEUE_NAME) + 
mock.assert_called_with(queue_type=QUEUE_TYPE, queue_name=QUEUE_NAME) + assert result == config + + +def test_delete_queue_configuration(mocker, event_client): + mock = mocker.patch.object(EventResourceApi, "delete_queue_config") + event_client.delete_queue_configuration(QUEUE_TYPE, QUEUE_NAME) + mock.assert_called_with(queue_type=QUEUE_TYPE, queue_name=QUEUE_NAME) + + +def test_get_queue_names(mocker, event_client): + mock = mocker.patch.object(EventResourceApi, "get_queue_names") + queue_names = {"kafka": "workflow_events", "sqs": "task_events"} + mock.return_value = queue_names + result = event_client.get_queue_names() + assert mock.called + assert result == queue_names + + +def test_handle_incoming_event(mocker, event_client): + mock = mocker.patch.object(EventResourceApi, "handle_incoming_event") + request_body = {"event": {"type": "workflow.completed", "data": {}}} + event_client.handle_incoming_event(request_body) + mock.assert_called_with(request_body) + + +def test_put_queue_config(mocker, event_client): + mock = mocker.patch.object(EventResourceApi, "put_queue_config") + body = '{"bootstrapServers": "localhost:9092"}' + event_client.put_queue_config(body, QUEUE_TYPE, QUEUE_NAME) + mock.assert_called_with(body, QUEUE_TYPE, QUEUE_NAME) + + +def test_test_method(mocker, event_client): + from conductor.client.http.models.event_handler import EventHandler + mock = mocker.patch.object(EventResourceApi, "test") + event_handler = EventHandler() + mock.return_value = event_handler + result = event_client.test() + assert mock.called + assert result == event_handler + + +def test_test_connectivity(mocker, event_client): + mock = mocker.patch.object(EventResourceApi, "test_connectivity") + test_input = ConnectivityTestInput( + input="test_connection", + sink="test_sink" + ) + test_result = ConnectivityTestResult( + successful=True, + reason="Connection successful" + ) + mock.return_value = test_result + result = event_client.test_connectivity(test_input) + mock.assert_called_with(test_input) + assert result == test_result + + +def test_create_event_handler_multiple(mocker, event_client): + mock = mocker.patch.object(EventResourceApi, "add_event_handler") + handlers = [ + EventHandlerAdapter(name="handler1", event=EVENT_NAME, active=True), + EventHandlerAdapter(name="handler2", event=EVENT_NAME, active=False), + ] + event_client.create_event_handler(handlers) + mock.assert_called_with(handlers) + + +def test_list_event_handlers_empty(mocker, event_client): + mock = mocker.patch.object(EventResourceApi, "get_event_handlers") + mock.return_value = [] + result = event_client.list_event_handlers() + assert mock.called + assert result == [] + + +def test_list_event_handlers_for_event_empty(mocker, event_client): + mock = mocker.patch.object(EventResourceApi, "get_event_handlers_for_event") + mock.return_value = [] + result = event_client.list_event_handlers_for_event(EVENT_NAME) + mock.assert_called_with(event=EVENT_NAME) + assert result == [] + + +def test_add_event_handler_tag_empty_list(mocker, event_client): + mock = mocker.patch.object(EventResourceApi, "put_tag_for_event_handler") + event_client.add_event_handler_tag(HANDLER_NAME, []) + mock.assert_called_with([], HANDLER_NAME) + + +def test_remove_event_handler_tag_empty_list(mocker, event_client): + mock = mocker.patch.object(EventResourceApi, "delete_tag_for_event_handler") + event_client.remove_event_handler_tag(HANDLER_NAME, []) + mock.assert_called_with([], HANDLER_NAME) + + +def test_get_event_handler_tags_empty(mocker, event_client): 
+    mock = mocker.patch.object(EventResourceApi, "get_tags_for_event_handler")
+    mock.return_value = []
+    result = event_client.get_event_handler_tags(HANDLER_NAME)
+    mock.assert_called_with(name=HANDLER_NAME)
+    assert result == []
+
+
+def test_get_queue_names_empty(mocker, event_client):
+    mock = mocker.patch.object(EventResourceApi, "get_queue_names")
+    mock.return_value = {}
+    result = event_client.get_queue_names()
+    assert mock.called
+    assert result == {}
diff --git a/tests/unit/orkes/test_integration_client.py b/tests/unit/orkes/test_integration_client.py
new file mode 100644
index 000000000..2db92fbf1
--- /dev/null
+++ b/tests/unit/orkes/test_integration_client.py
@@ -0,0 +1,283 @@
+import logging
+import pytest
+
+from conductor.client.codegen.rest import ApiException
+from conductor.client.configuration.configuration import Configuration
+from conductor.client.http.api.integration_resource_api import IntegrationResourceApi
+from conductor.client.http.models.integration import Integration
+from conductor.client.http.models.integration_api import IntegrationApi
+from conductor.client.http.models.integration_api_update import IntegrationApiUpdate
+from conductor.client.http.models.integration_def import IntegrationDef
+from conductor.client.http.models.integration_update import IntegrationUpdate
+from conductor.client.http.models.message_template import MessageTemplate
+from conductor.client.http.models.tag import Tag
+from conductor.client.orkes.orkes_integration_client import OrkesIntegrationClient
+
+INTEGRATION_NAME = "test_integration"
+INTEGRATION_API_NAME = "test_api"
+AI_PROMPT = "test_prompt"
+MODEL_NAME = "test_model"
+AI_INTEGRATION = "test_ai_integration"
+
+
+@pytest.fixture(scope="module")
+def integration_client():
+    configuration = Configuration("http://localhost:8080/api")
+    return OrkesIntegrationClient(configuration)
+
+
+@pytest.fixture(scope="module")
+def integration():
+    return Integration()
+
+
+@pytest.fixture(scope="module")
+def integration_update():
+    return IntegrationUpdate()
+
+
+@pytest.fixture(scope="module")
+def integration_api():
+    return IntegrationApi()
+
+
+@pytest.fixture(scope="module")
+def integration_api_update():
+    return IntegrationApiUpdate()
+
+
+@pytest.fixture(scope="module")
+def integration_def():
+    return IntegrationDef()
+
+
+@pytest.fixture(scope="module")
+def tag_list():
+    return [Tag(key="env", value="prod"), Tag(key="team", value="platform")]
+
+
+@pytest.fixture(autouse=True)
+def disable_logging():
+    logging.disable(logging.CRITICAL)
+    yield
+    logging.disable(logging.NOTSET)
+
+
+def test_init(integration_client):
+    message = "integration_api is not of type IntegrationResourceApi"
+    assert isinstance(integration_client._integration_api, IntegrationResourceApi), message
+
+
+def test_save_integration_provider(mocker, integration_client, integration_update):
+    mock = mocker.patch.object(IntegrationResourceApi, "save_integration_provider")
+    integration_client.save_integration_provider(INTEGRATION_NAME, integration_update)
+    mock.assert_called_with(body=integration_update, name=INTEGRATION_NAME)
+
+
+def test_save_integration(mocker, integration_client, integration_update):
+    mock = mocker.patch.object(IntegrationResourceApi, "save_integration_provider")
+    integration_client.save_integration(INTEGRATION_NAME, integration_update)
+    mock.assert_called_with(body=integration_update, name=INTEGRATION_NAME)
+
+
+def test_get_integration_provider(mocker, integration_client, integration):
+    mock = mocker.patch.object(IntegrationResourceApi, "get_integration_provider")
+    mock.return_value = integration
+    result = integration_client.get_integration_provider(INTEGRATION_NAME)
+    mock.assert_called_with(name=INTEGRATION_NAME)
+    assert result == integration
+
+
+def test_get_integration_provider_not_found(mocker, integration_client):
+    mock = mocker.patch.object(IntegrationResourceApi, "get_integration_provider")
+    api_exception = ApiException(status=404)
+    mock.side_effect = api_exception
+    result = integration_client.get_integration_provider(INTEGRATION_NAME)
+    mock.assert_called_with(name=INTEGRATION_NAME)
+    assert result is None
+
+
+def test_get_integration(mocker, integration_client, integration):
+    mock = mocker.patch.object(IntegrationResourceApi, "get_integration_provider")
+    mock.return_value = integration
+    result = integration_client.get_integration(INTEGRATION_NAME)
+    mock.assert_called_with(name=INTEGRATION_NAME)
+    assert result == integration
+
+
+def test_get_integration_not_found(mocker, integration_client):
+    mock = mocker.patch.object(IntegrationResourceApi, "get_integration_provider")
+    api_exception = ApiException(status=404)
+    mock.side_effect = api_exception
+    result = integration_client.get_integration(INTEGRATION_NAME)
+    mock.assert_called_with(name=INTEGRATION_NAME)
+    assert result is None
+
+
+def test_get_integration_providers(mocker, integration_client, integration):
+    mock = mocker.patch.object(IntegrationResourceApi, "get_integration_providers")
+    mock.return_value = [integration]
+    result = integration_client.get_integration_providers()
+    assert mock.called
+    assert result == [integration]
+
+
+def test_get_integration_providers_with_filters(mocker, integration_client, integration):
+    mock = mocker.patch.object(IntegrationResourceApi, "get_integration_providers")
+    mock.return_value = [integration]
+    result = integration_client.get_integration_providers(category="API", active_only=True)
+    mock.assert_called_with(category="API", active_only=True)
+    assert result == [integration]
+
+
+def test_get_integration_provider_defs(mocker, integration_client, integration_def):
+    mock = mocker.patch.object(IntegrationResourceApi, "get_integration_provider_defs")
+    mock.return_value = [integration_def]
+    result = integration_client.get_integration_provider_defs()
+    assert mock.called
+    assert result == [integration_def]
+
+
+def test_delete_integration(mocker, integration_client):
+    mock = mocker.patch.object(IntegrationResourceApi, "delete_integration_provider")
+    integration_client.delete_integration(INTEGRATION_NAME)
+    mock.assert_called_with(name=INTEGRATION_NAME)
+
+
+def test_save_integration_api(mocker, integration_client, integration_api_update):
+    mock = mocker.patch.object(IntegrationResourceApi, "save_integration_api")
+    integration_client.save_integration_api(INTEGRATION_NAME, INTEGRATION_API_NAME, integration_api_update)
+    mock.assert_called_with(body=integration_api_update, name=INTEGRATION_API_NAME, integration_name=INTEGRATION_NAME)
+
+
+def test_get_integration_api(mocker, integration_client, integration_api):
+    mock = mocker.patch.object(IntegrationResourceApi, "get_integration_api")
+    mock.return_value = integration_api
+    result = integration_client.get_integration_api(INTEGRATION_API_NAME, INTEGRATION_NAME)
+    mock.assert_called_with(name=INTEGRATION_API_NAME, integration_name=INTEGRATION_NAME)
+    assert result == integration_api
+
+
+def test_get_integration_api_not_found(mocker, integration_client):
+    mock = mocker.patch.object(IntegrationResourceApi, "get_integration_api")
+    api_exception = ApiException(status=404)
+    mock.side_effect = api_exception
+    result = integration_client.get_integration_api(INTEGRATION_API_NAME, INTEGRATION_NAME)
+    mock.assert_called_with(name=INTEGRATION_API_NAME, integration_name=INTEGRATION_NAME)
+    assert result is None
+
+
+def test_delete_integration_api(mocker, integration_client):
+    mock = mocker.patch.object(IntegrationResourceApi, "delete_integration_api")
+    integration_client.delete_integration_api(INTEGRATION_API_NAME, INTEGRATION_NAME)
+    mock.assert_called_with(name=INTEGRATION_API_NAME, integration_name=INTEGRATION_NAME)
+
+
+def test_get_integration_apis(mocker, integration_client, integration_api):
+    mock = mocker.patch.object(IntegrationResourceApi, "get_integration_apis")
+    mock.return_value = [integration_api]
+    result = integration_client.get_integration_apis(INTEGRATION_NAME)
+    mock.assert_called_with(name=INTEGRATION_NAME)
+    assert result == [integration_api]
+
+
+def test_get_integrations(mocker, integration_client, integration):
+    mock = mocker.patch.object(IntegrationResourceApi, "get_integration_providers")
+    mock.return_value = [integration]
+    result = integration_client.get_integrations()
+    assert mock.called
+    assert result == [integration]
+
+
+def test_associate_prompt_with_integration(mocker, integration_client):
+    mock = mocker.patch.object(IntegrationResourceApi, "associate_prompt_with_integration")
+    integration_client.associate_prompt_with_integration(AI_INTEGRATION, MODEL_NAME, AI_PROMPT)
+    mock.assert_called_with(integration_provider=AI_INTEGRATION, integration_name=MODEL_NAME, prompt_name=AI_PROMPT)
+
+
+def test_get_prompts_with_integration(mocker, integration_client):
+    mock = mocker.patch.object(IntegrationResourceApi, "get_prompts_with_integration")
+    message_template = MessageTemplate()
+    mock.return_value = [message_template]
+    result = integration_client.get_prompts_with_integration(AI_INTEGRATION, MODEL_NAME)
+    mock.assert_called_with(integration_provider=AI_INTEGRATION, integration_name=MODEL_NAME)
+    assert result == [message_template]
+
+
+def test_get_token_usage_for_integration(mocker, integration_client):
+    mock = mocker.patch.object(IntegrationResourceApi, "get_token_usage_for_integration")
+    mock.return_value = 1000
+    result = integration_client.get_token_usage_for_integration(INTEGRATION_API_NAME, INTEGRATION_NAME)
+    mock.assert_called_with(name=INTEGRATION_API_NAME, integration_name=INTEGRATION_NAME)
+    assert result == 1000
+
+
+def test_get_token_usage_for_integration_provider(mocker, integration_client):
+    mock = mocker.patch.object(IntegrationResourceApi, "get_token_usage_for_integration_provider")
+    expected_usage = {"total": "5000", "monthly": "1000"}
+    mock.return_value = expected_usage
+    result = integration_client.get_token_usage_for_integration_provider(INTEGRATION_NAME)
+    mock.assert_called_with(name=INTEGRATION_NAME)
+    assert result == expected_usage
+
+
+def test_register_token_usage(mocker, integration_client):
+    mock = mocker.patch.object(IntegrationResourceApi, "register_token_usage")
+    integration_client.register_token_usage(500, INTEGRATION_API_NAME, INTEGRATION_NAME)
+    mock.assert_called_with(body=500, name=INTEGRATION_API_NAME, integration_name=INTEGRATION_NAME)
+
+
+def test_delete_tag_for_integration(mocker, integration_client, tag_list):
+    mock = mocker.patch.object(IntegrationResourceApi, "delete_tag_for_integration")
+    integration_client.delete_tag_for_integration(tag_list, INTEGRATION_API_NAME, INTEGRATION_NAME)
+    mock.assert_called_with(body=tag_list, name=INTEGRATION_API_NAME, integration_name=INTEGRATION_NAME)
+
+
+def test_delete_tag_for_integration_provider(mocker, integration_client, tag_list):
+    mock = mocker.patch.object(IntegrationResourceApi, "delete_tag_for_integration_provider")
+    integration_client.delete_tag_for_integration_provider(tag_list, INTEGRATION_NAME)
+    mock.assert_called_with(body=tag_list, name=INTEGRATION_NAME)
+
+
+def test_get_tags_for_integration(mocker, integration_client, tag_list):
+    mock = mocker.patch.object(IntegrationResourceApi, "get_tags_for_integration")
+    mock.return_value = tag_list
+    result = integration_client.get_tags_for_integration(INTEGRATION_API_NAME, INTEGRATION_NAME)
+    mock.assert_called_with(name=INTEGRATION_API_NAME, integration_name=INTEGRATION_NAME)
+    assert result == tag_list
+
+
+def test_get_tags_for_integration_provider(mocker, integration_client, tag_list):
+    mock = mocker.patch.object(IntegrationResourceApi, "get_tags_for_integration_provider")
+    mock.return_value = tag_list
+    result = integration_client.get_tags_for_integration_provider(INTEGRATION_NAME)
+    mock.assert_called_with(name=INTEGRATION_NAME)
+    assert result == tag_list
+
+
+def test_put_tag_for_integration(mocker, integration_client, tag_list):
+    mock = mocker.patch.object(IntegrationResourceApi, "put_tag_for_integration")
+    integration_client.put_tag_for_integration(tag_list, INTEGRATION_API_NAME, INTEGRATION_NAME)
+    mock.assert_called_with(body=tag_list, name=INTEGRATION_API_NAME, integration_name=INTEGRATION_NAME)
+
+
+def test_put_tag_for_integration_provider(mocker, integration_client, tag_list):
+    mock = mocker.patch.object(IntegrationResourceApi, "put_tag_for_integration_provider")
+    integration_client.put_tag_for_integration_provider(tag_list, INTEGRATION_NAME)
+    mock.assert_called_with(body=tag_list, name=INTEGRATION_NAME)
+
+
+def test_get_integration_providers_empty(mocker, integration_client):
+    mock = mocker.patch.object(IntegrationResourceApi, "get_integration_providers")
+    mock.return_value = []
+    result = integration_client.get_integration_providers()
+    assert mock.called
+    assert result == []
+
+
+def test_get_integration_apis_empty(mocker, integration_client):
+    mock = mocker.patch.object(IntegrationResourceApi, "get_integration_apis")
+    mock.return_value = []
+    result = integration_client.get_integration_apis(INTEGRATION_NAME)
+    mock.assert_called_with(name=INTEGRATION_NAME)
+    assert result == []
diff --git a/tests/unit/orkes/test_metadata_client.py b/tests/unit/orkes/test_metadata_client.py
index 1f5d9c4f7..47ba9038b 100644
--- a/tests/unit/orkes/test_metadata_client.py
+++ b/tests/unit/orkes/test_metadata_client.py
@@ -48,7 +48,7 @@ def wf_tag_obj():
 
 def test_init(metadata_client):
     message = "metadataResourceApi is not of type MetadataResourceApi"
-    assert isinstance(metadata_client.metadataResourceApi, MetadataResourceApi), message
+    assert isinstance(metadata_client._metadata_api, MetadataResourceApi), message
 
 
 def test_register_workflow_def(mocker, metadata_client, workflow_def):
@@ -83,7 +83,7 @@ def test_unregister_workflow_def(mocker, metadata_client):
     mock = mocker.patch.object(MetadataResourceApi, "unregister_workflow_def")
     metadata_client.unregister_workflow_def(WORKFLOW_NAME, 1)
     assert mock.called
-    mock.assert_called_with(WORKFLOW_NAME, 1)
+    mock.assert_called_with(name=WORKFLOW_NAME, version=1)
 
 
 def test_get_workflow_def_without_version(mocker, metadata_client, workflow_def):
@@ -92,7 +92,7 @@ def test_get_workflow_def_without_version(mocker, metadata_client, workflow_def)
     wf = metadata_client.get_workflow_def(WORKFLOW_NAME)
     assert wf == workflow_def
     assert mock.called
-    mock.assert_called_with(WORKFLOW_NAME)
+    mock.assert_called_with(name=WORKFLOW_NAME)
 
 
 def test_get_workflow_def_with_version(mocker, metadata_client, workflow_def):
@@ -100,7 +100,7 @@ def test_get_workflow_def_with_version(mocker, metadata_client, workflow_def):
     mock.return_value = workflow_def
     wf = metadata_client.get_workflow_def(WORKFLOW_NAME, 1)
     assert wf == workflow_def
-    mock.assert_called_with(WORKFLOW_NAME, version=1)
+    mock.assert_called_with(name=WORKFLOW_NAME, version=1)
 
 
 def test_get_workflow_def_non_existent(mocker, metadata_client, workflow_def):
@@ -134,14 +134,14 @@ def test_update_task_def(mocker, metadata_client, task_def):
     mock = mocker.patch.object(MetadataResourceApi, "update_task_def")
     metadata_client.update_task_def(task_def)
     assert mock.called
-    mock.assert_called_with(task_def)
+    mock.assert_called_with(body=task_def)
 
 
 def test_unregister_task_def(mocker, metadata_client):
     mock = mocker.patch.object(MetadataResourceApi, "unregister_task_def")
     metadata_client.unregister_task_def(TASK_NAME)
     assert mock.called
-    mock.assert_called_with(TASK_NAME)
+    mock.assert_called_with(tasktype=TASK_NAME)
 
 
 def test_get_task_def(mocker, metadata_client, task_def):
diff --git a/tests/unit/orkes/test_prompt_client.py b/tests/unit/orkes/test_prompt_client.py
new file mode 100644
index 000000000..63fbe85ff
--- /dev/null
+++ b/tests/unit/orkes/test_prompt_client.py
@@ -0,0 +1,209 @@
+import logging
+import pytest
+
+from conductor.client.codegen.rest import ApiException
+from conductor.client.configuration.configuration import Configuration
+from conductor.client.http.api.prompt_resource_api import PromptResourceApi
+from conductor.client.http.models.message_template import MessageTemplate
+from conductor.client.http.models.prompt_template_test_request import PromptTemplateTestRequest
+from conductor.client.http.models.tag import Tag
+from conductor.client.orkes.orkes_prompt_client import OrkesPromptClient
+
+PROMPT_NAME = "test_prompt"
+PROMPT_DESCRIPTION = "Test prompt description"
+PROMPT_TEMPLATE = "Hello {{name}}, welcome to {{place}}"
+AI_INTEGRATION = "openai"
+MODEL_NAME = "gpt-4"
+
+
+@pytest.fixture(scope="module")
+def prompt_client():
+    configuration = Configuration("http://localhost:8080/api")
+    return OrkesPromptClient(configuration)
+
+
+@pytest.fixture(scope="module")
+def message_template():
+    template = MessageTemplate()
+    template.name = PROMPT_NAME
+    template.description = PROMPT_DESCRIPTION
+    template.template = PROMPT_TEMPLATE
+    return template
+
+
+@pytest.fixture(scope="module")
+def tag_list():
+    return [Tag(key="env", value="prod"), Tag(key="team", value="platform")]
+
+
+@pytest.fixture(autouse=True)
+def disable_logging():
+    logging.disable(logging.CRITICAL)
+    yield
+    logging.disable(logging.NOTSET)
+
+
+def test_init(prompt_client):
+    message = "prompt_api is not of type PromptResourceApi"
+    assert isinstance(prompt_client._prompt_api, PromptResourceApi), message
+
+
+def test_save_prompt(mocker, prompt_client):
+    mock = mocker.patch.object(PromptResourceApi, "save_message_template")
+    prompt_client.save_prompt(PROMPT_NAME, PROMPT_DESCRIPTION, PROMPT_TEMPLATE)
+    mock.assert_called_with(body=PROMPT_TEMPLATE, description=PROMPT_DESCRIPTION, name=PROMPT_NAME)
+
+
+def test_get_prompt(mocker, prompt_client, message_template):
+    mock = mocker.patch.object(PromptResourceApi, "get_message_template")
"get_message_template") + mock.return_value = message_template + result = prompt_client.get_prompt(PROMPT_NAME) + mock.assert_called_with(name=PROMPT_NAME) + assert result == message_template + + +def test_get_prompt_not_found(mocker, prompt_client): + mock = mocker.patch.object(PromptResourceApi, "get_message_template") + api_exception = ApiException(status=404) + mock.side_effect = api_exception + result = prompt_client.get_prompt(PROMPT_NAME) + mock.assert_called_with(name=PROMPT_NAME) + assert result is None + + +def test_get_prompts(mocker, prompt_client, message_template): + mock = mocker.patch.object(PromptResourceApi, "get_message_templates") + mock.return_value = [message_template] + result = prompt_client.get_prompts() + assert mock.called + assert result == [message_template] + + +def test_delete_prompt(mocker, prompt_client): + mock = mocker.patch.object(PromptResourceApi, "delete_message_template") + prompt_client.delete_prompt(PROMPT_NAME) + mock.assert_called_with(name=PROMPT_NAME) + + +def test_get_tags_for_prompt_template(mocker, prompt_client, tag_list): + mock = mocker.patch.object(PromptResourceApi, "get_tags_for_prompt_template") + mock.return_value = tag_list + result = prompt_client.get_tags_for_prompt_template(PROMPT_NAME) + mock.assert_called_with(name=PROMPT_NAME) + assert result == tag_list + + +def test_update_tag_for_prompt_template(mocker, prompt_client, tag_list): + mock = mocker.patch.object(PromptResourceApi, "put_tag_for_prompt_template") + prompt_client.update_tag_for_prompt_template(PROMPT_NAME, tag_list) + mock.assert_called_with(body=tag_list, name=PROMPT_NAME) + + +def test_delete_tag_for_prompt_template(mocker, prompt_client, tag_list): + mock = mocker.patch.object(PromptResourceApi, "delete_tag_for_prompt_template") + prompt_client.delete_tag_for_prompt_template(PROMPT_NAME, tag_list) + mock.assert_called_with(body=tag_list, name=PROMPT_NAME) + + +def test_test_prompt(mocker, prompt_client): + mock = mocker.patch.object(PromptResourceApi, "test_message_template") + mock.return_value = "Hello John, welcome to Paris" + variables = {"name": "John", "place": "Paris"} + result = prompt_client.test_prompt( + PROMPT_TEMPLATE, + variables, + AI_INTEGRATION, + MODEL_NAME, + temperature=0.5, + top_p=0.8 + ) + assert mock.called + call_args = mock.call_args[0][0] + assert isinstance(call_args, PromptTemplateTestRequest) + assert call_args.prompt == PROMPT_TEMPLATE + assert call_args.llm_provider == AI_INTEGRATION + assert call_args.model == MODEL_NAME + assert call_args.prompt_variables == variables + assert call_args.temperature == 0.5 + assert call_args.top_p == 0.8 + assert result == "Hello John, welcome to Paris" + + +def test_test_prompt_with_stop_words(mocker, prompt_client): + mock = mocker.patch.object(PromptResourceApi, "test_message_template") + mock.return_value = "Hello John" + variables = {"name": "John"} + stop_words = [".", "!"] + result = prompt_client.test_prompt( + "Hello {{name}}", + variables, + AI_INTEGRATION, + MODEL_NAME, + stop_words=stop_words + ) + assert mock.called + call_args = mock.call_args[0][0] + assert call_args.stop_words == stop_words + assert result == "Hello John" + + +def test_create_message_templates(mocker, prompt_client, message_template): + mock = mocker.patch.object(PromptResourceApi, "create_message_templates") + templates = [message_template] + prompt_client.create_message_templates(templates) + mock.assert_called_with(body=templates) + + +def test_get_prompts_empty(mocker, prompt_client): + mock = 
+    mock.return_value = []
+    result = prompt_client.get_prompts()
+    assert mock.called
+    assert result == []
+
+
+def test_get_tags_for_prompt_template_empty(mocker, prompt_client):
+    mock = mocker.patch.object(PromptResourceApi, "get_tags_for_prompt_template")
+    mock.return_value = []
+    result = prompt_client.get_tags_for_prompt_template(PROMPT_NAME)
+    mock.assert_called_with(name=PROMPT_NAME)
+    assert result == []
+
+
+def test_update_tag_for_prompt_template_empty(mocker, prompt_client):
+    mock = mocker.patch.object(PromptResourceApi, "put_tag_for_prompt_template")
+    prompt_client.update_tag_for_prompt_template(PROMPT_NAME, [])
+    mock.assert_called_with(body=[], name=PROMPT_NAME)
+
+
+def test_delete_tag_for_prompt_template_empty(mocker, prompt_client):
+    mock = mocker.patch.object(PromptResourceApi, "delete_tag_for_prompt_template")
+    prompt_client.delete_tag_for_prompt_template(PROMPT_NAME, [])
+    mock.assert_called_with(body=[], name=PROMPT_NAME)
+
+
+def test_test_prompt_default_params(mocker, prompt_client):
+    mock = mocker.patch.object(PromptResourceApi, "test_message_template")
+    mock.return_value = "Test output"
+    variables = {"key": "value"}
+    result = prompt_client.test_prompt(
+        "Test {{key}}",
+        variables,
+        AI_INTEGRATION,
+        MODEL_NAME
+    )
+    assert mock.called
+    call_args = mock.call_args[0][0]
+    assert call_args.temperature == 0.1
+    assert call_args.top_p == 0.9
+    assert result == "Test output"
+
+
+def test_create_message_templates_multiple(mocker, prompt_client):
+    mock = mocker.patch.object(PromptResourceApi, "create_message_templates")
+    templates = [
+        MessageTemplate(name="template1"),
+        MessageTemplate(name="template2"),
+    ]
+    prompt_client.create_message_templates(templates)
+    mock.assert_called_with(body=templates)
diff --git a/tests/unit/orkes/test_scheduler_client.py b/tests/unit/orkes/test_scheduler_client.py
index 553df8cc2..25df24cd7 100644
--- a/tests/unit/orkes/test_scheduler_client.py
+++ b/tests/unit/orkes/test_scheduler_client.py
@@ -43,7 +43,7 @@ def save_schedule_request():
 def test_init(scheduler_client):
     message = "schedulerResourceApi is not of type SchedulerResourceApi"
     assert isinstance(
-        scheduler_client.schedulerResourceApi, SchedulerResourceApi
+        scheduler_client._scheduler_api, SchedulerResourceApi
     ), message
 
 
@@ -51,7 +51,7 @@ def test_save_schedule(mocker, scheduler_client, save_schedule_request):
     mock = mocker.patch.object(SchedulerResourceApi, "save_schedule")
     scheduler_client.save_schedule(save_schedule_request)
     assert mock.called
-    mock.assert_called_with(save_schedule_request)
+    mock.assert_called_with(body=save_schedule_request)
 
 
 def test_get_schedule(mocker, scheduler_client, workflow_schedule):
@@ -60,7 +60,7 @@ def test_get_schedule(mocker, scheduler_client, workflow_schedule):
     schedule = scheduler_client.get_schedule(SCHEDULE_NAME)
     assert schedule == workflow_schedule
     assert mock.called
-    mock.assert_called_with(SCHEDULE_NAME)
+    mock.assert_called_with(name=SCHEDULE_NAME)
 
 
 def test_get_schedule_non_existing(mocker, scheduler_client):
@@ -98,7 +98,7 @@ def test_get_next_few_schedule_execution_times(mocker, scheduler_client):
     mock.return_value = [1698093000000, 1698093300000, 1698093600000]
     times = scheduler_client.get_next_few_schedule_execution_times(cron_expression)
     assert len(times) == expected_next_few_schedule_execution_times
-    mock.assert_called_with(cron_expression)
+    mock.assert_called_with(cron_expression=cron_expression)
 
 
 def test_get_next_few_schedule_execution_times_with_optional_params(
@@ -113,7 +113,7 @@ def test_get_next_few_schedule_execution_times_with_optional_params(
     )
     assert len(times) == expected_next_few_schedule_execution_times
     mock.assert_called_with(
-        cron_expression,
+        cron_expression=cron_expression,
         schedule_start_time=1698093300000,
         schedule_end_time=1698093600000,
         limit=2,
@@ -123,13 +123,13 @@ def test_get_next_few_schedule_execution_times_with_optional_params(
 def test_delete_schedule(mocker, scheduler_client):
     mock = mocker.patch.object(SchedulerResourceApi, "delete_schedule")
     scheduler_client.delete_schedule(SCHEDULE_NAME)
-    mock.assert_called_with(SCHEDULE_NAME)
+    mock.assert_called_with(name=SCHEDULE_NAME)
 
 
 def test_pause_schedule(mocker, scheduler_client):
     mock = mocker.patch.object(SchedulerResourceApi, "pause_schedule")
     scheduler_client.pause_schedule(SCHEDULE_NAME)
-    mock.assert_called_with(SCHEDULE_NAME)
+    mock.assert_called_with(name=SCHEDULE_NAME)
 
 
 def test_pause_all_schedules(mocker, scheduler_client):
@@ -141,7 +141,7 @@ def test_pause_all_schedules(mocker, scheduler_client):
 def test_resume_schedule(mocker, scheduler_client):
     mock = mocker.patch.object(SchedulerResourceApi, "resume_schedule")
     scheduler_client.resume_schedule(SCHEDULE_NAME)
-    mock.assert_called_with(SCHEDULE_NAME)
+    mock.assert_called_with(name=SCHEDULE_NAME)
 
 
 def test_resume_all_schedules(mocker, scheduler_client):
@@ -193,7 +193,7 @@ def test_get_scheduler_tags(mocker, scheduler_client):
     tag2 = MetadataTag("tag2", "val2")
     mock.return_value = [tag1, tag2]
     tags = scheduler_client.get_scheduler_tags(SCHEDULE_NAME)
-    mock.assert_called_with(SCHEDULE_NAME)
+    mock.assert_called_with(name=SCHEDULE_NAME)
     assert len(tags) == expected_tags_len
 
 
diff --git a/tests/unit/orkes/test_schema_client.py b/tests/unit/orkes/test_schema_client.py
index b93450ecd..11bc5a10c 100644
--- a/tests/unit/orkes/test_schema_client.py
+++ b/tests/unit/orkes/test_schema_client.py
@@ -31,14 +31,14 @@ def schema_def():
 
 def test_init(schema_client):
     message = "schemaApi is not of type SchemaApi"
-    assert isinstance(schema_client.schemaApi, SchemaResourceApi), message
+    assert isinstance(schema_client._schema_api, SchemaResourceApi), message
 
 
 def test_register_schema(mocker, schema_client, schema_def):
     mock = mocker.patch.object(SchemaResourceApi, "save")
     schema_client.register_schema(schema_def)
     assert mock.called
-    mock.assert_called_with(schema_def)
+    mock.assert_called_with(body=schema_def)
 
 
 def test_get_schema(mocker, schema_client, schema_def):
diff --git a/tests/unit/orkes/test_secret_client.py b/tests/unit/orkes/test_secret_client.py
index 1466d10e7..6438bf4ad 100644
--- a/tests/unit/orkes/test_secret_client.py
+++ b/tests/unit/orkes/test_secret_client.py
@@ -27,20 +27,20 @@ def disable_logging():
 
 def test_init(secret_client):
     message = "secretResourceApi is not of type SecretResourceApi"
-    assert isinstance(secret_client.secretResourceApi, SecretResourceApi), message
+    assert isinstance(secret_client._secret_api, SecretResourceApi), message
 
 
 def test_put_secret(mocker, secret_client):
     mock = mocker.patch.object(SecretResourceApi, "put_secret")
     secret_client.put_secret(SECRET_KEY, SECRET_VALUE)
-    mock.assert_called_with(SECRET_VALUE, SECRET_KEY)
+    mock.assert_called_with(body=SECRET_VALUE, key=SECRET_KEY)
 
 
 def test_get_secret(mocker, secret_client):
     mock = mocker.patch.object(SecretResourceApi, "get_secret")
     mock.return_value = SECRET_VALUE
     secret = secret_client.get_secret(SECRET_KEY)
-    mock.assert_called_with(SECRET_KEY)
+    mock.assert_called_with(key=SECRET_KEY)
     assert secret == SECRET_VALUE
 
 
@@ -67,14 +67,14 @@ def test_list_secrets_that_user_can_grant_access_to(mocker, secret_client):
 def test_delete_secret(mocker, secret_client):
     mock = mocker.patch.object(SecretResourceApi, "delete_secret")
     secret_client.delete_secret(SECRET_KEY)
-    mock.assert_called_with(SECRET_KEY)
+    mock.assert_called_with(key=SECRET_KEY)
 
 
 def test_secret_exists(mocker, secret_client):
     mock = mocker.patch.object(SecretResourceApi, "secret_exists")
     mock.return_value = True
     assert secret_client.secret_exists(SECRET_KEY) is True
-    mock.assert_called_with(SECRET_KEY)
+    mock.assert_called_with(key=SECRET_KEY)
 
 
 def test_set_secret_tags(mocker, secret_client):
@@ -83,7 +83,7 @@ def test_set_secret_tags(mocker, secret_client):
     tag2 = MetadataTag("tag2", "val2")
     tags = [tag1, tag2]
     secret_client.set_secret_tags(tags, SECRET_KEY)
-    mock.assert_called_with(tags, SECRET_KEY)
+    mock.assert_called_with(body=tags, key=SECRET_KEY)
 
 
 def test_get_secret_tags(mocker, secret_client):
@@ -93,7 +93,7 @@ def test_get_secret_tags(mocker, secret_client):
     tag2 = MetadataTag("tag2", "val2")
     mock.return_value = [tag1, tag2]
     tags = secret_client.get_secret_tags(SECRET_KEY)
-    mock.assert_called_with(SECRET_KEY)
+    mock.assert_called_with(key=SECRET_KEY)
     assert len(tags) == expected_tags_len
 
 
@@ -103,4 +103,4 @@ def test_delete_secret_tags(mocker, secret_client):
     tag2 = MetadataTag("tag2", "val2")
     tags = [tag1, tag2]
     secret_client.delete_secret_tags(tags, SECRET_KEY)
-    mock.assert_called_with(tags, SECRET_KEY)
+    mock.assert_called_with(body=tags, key=SECRET_KEY)
diff --git a/tests/unit/orkes/test_task_client.py b/tests/unit/orkes/test_task_client.py
index 45ea67aea..6f79b7e33 100644
--- a/tests/unit/orkes/test_task_client.py
+++ b/tests/unit/orkes/test_task_client.py
@@ -60,14 +60,14 @@ def tasks():
 
 def test_init(task_client):
     message = "taskResourceApi is not of type TaskResourceApi"
-    assert isinstance(task_client.taskResourceApi, TaskResourceApi), message
+    assert isinstance(task_client._task_api, TaskResourceApi), message
 
 
 def test_poll_task(mocker, task_client, tasks):
     mock = mocker.patch.object(TaskResourceApi, "poll")
     mock.return_value = tasks[0]
     polled_task = task_client.poll_task(TASK_NAME)
-    mock.assert_called_with(TASK_NAME)
+    mock.assert_called_with(tasktype=TASK_NAME)
     assert polled_task == tasks[0]
 
 
@@ -75,7 +75,7 @@ def test_poll_task_with_worker_and_domain(mocker, task_client, tasks):
     mock = mocker.patch.object(TaskResourceApi, "poll")
     mock.return_value = tasks[0]
     polled_task = task_client.poll_task(TASK_NAME, WORKER_ID, DOMAIN)
-    mock.assert_called_with(TASK_NAME, workerid=WORKER_ID, domain=DOMAIN)
+    mock.assert_called_with(tasktype=TASK_NAME, workerid=WORKER_ID, domain=DOMAIN)
     assert polled_task == tasks[0]
 
 
@@ -83,7 +83,7 @@ def test_poll_task_no_tasks(mocker, task_client):
     mock = mocker.patch.object(TaskResourceApi, "poll")
     mock.return_value = None
     polled_task = task_client.poll_task(TASK_NAME)
-    mock.assert_called_with(TASK_NAME)
+    mock.assert_called_with(tasktype=TASK_NAME)
     assert polled_task is None
 
 
@@ -91,7 +91,7 @@ def test_batch_poll_tasks(mocker, task_client, tasks):
     mock = mocker.patch.object(TaskResourceApi, "batch_poll")
     mock.return_value = tasks
     polled_tasks = task_client.batch_poll_tasks(TASK_NAME, WORKER_ID, 3, 200)
-    mock.assert_called_with(TASK_NAME, workerid=WORKER_ID, count=3, timeout=200)
+    mock.assert_called_with(tasktype=TASK_NAME, workerid=WORKER_ID, count=3, timeout=200)
     assert len(polled_tasks) == len(tasks)
 
 
@@ -100,7 +100,7 @@ def test_batch_poll_tasks_in_domain(mocker, task_client, tasks):
     mock.return_value = tasks
     polled_tasks = task_client.batch_poll_tasks(TASK_NAME, WORKER_ID, 3, 200, DOMAIN)
     mock.assert_called_with(
-        TASK_NAME, workerid=WORKER_ID, domain=DOMAIN, count=3, timeout=200
+        tasktype=TASK_NAME, workerid=WORKER_ID, domain=DOMAIN, count=3, timeout=200
     )
     assert len(polled_tasks) == len(tasks)
 
@@ -109,7 +109,7 @@ def test_get_task(mocker, task_client, tasks):
     mock = mocker.patch.object(TaskResourceApi, "get_task")
     mock.return_value = tasks[0]
     task = task_client.get_task(TASK_ID)
-    mock.assert_called_with(TASK_ID)
+    mock.assert_called_with(task_id=TASK_ID)
     assert task.task_id == TASK_ID
 
 
@@ -127,7 +127,7 @@ def test_update_task(mocker, task_client):
     mock = mocker.patch.object(TaskResourceApi, "update_task")
     task_result_status = TaskResult(task_id=TASK_ID, status=TaskResultStatus.COMPLETED)
     task_client.update_task(task_result_status)
-    mock.assert_called_with(task_result_status)
+    mock.assert_called_with(body=task_result_status)
 
 
 def test_update_task_by_ref_name(mocker, task_client):
@@ -135,7 +135,7 @@ def test_update_task_by_ref_name(mocker, task_client):
     status = TaskResultStatus.COMPLETED
     output = {"a": 56}
     task_client.update_task_by_ref_name("wf_id", "test_task_ref_name", status, output)
-    mock.assert_called_with({"result": output}, "wf_id", "test_task_ref_name", status)
+    mock.assert_called_with(body={"result": output}, workflow_id="wf_id", task_ref_name="test_task_ref_name", status=status)
 
 
 def test_update_task_by_ref_name_with_worker_id(mocker, task_client):
@@ -146,7 +146,7 @@ def test_update_task_by_ref_name_with_worker_id(mocker, task_client):
         "wf_id", "test_task_ref_name", status, output, "worker_id"
     )
     mock.assert_called_with(
-        {"result": output}, "wf_id", "test_task_ref_name", status, workerid="worker_id"
+        body={"result": output}, workflow_id="wf_id", task_ref_name="test_task_ref_name", status=status, workerid="worker_id"
     )
 
 
@@ -160,7 +160,7 @@ def test_update_task_sync(mocker, task_client):
     returned_workflow = task_client.update_task_sync(
         workflow_id, "test_task_ref_name", status, output
    )
-    mock.assert_called_with(output, workflow_id, "test_task_ref_name", status)
+    mock.assert_called_with(body=output, workflow_id=workflow_id, task_ref_name="test_task_ref_name", status=status)
     assert returned_workflow == workflow
 
 
@@ -175,7 +175,7 @@ def test_update_task_sync_with_worker_id(mocker, task_client):
         workflow_id, "test_task_ref_name", status, output, "worker_id"
     )
     mock.assert_called_with(
-        output, workflow_id, "test_task_ref_name", status, workerid="worker_id"
+        body=output, workflow_id=workflow_id, task_ref_name="test_task_ref_name", status=status, workerid="worker_id"
     )
     assert returned_workflow == workflow
 
@@ -211,5 +211,5 @@ def test_get_task_logs(mocker, task_client):
     task_exec_log2 = TaskExecLog("Test log 2", TASK_ID)
     mock.return_value = [task_exec_log1, task_exec_log2]
     logs = task_client.get_task_logs(TASK_ID)
-    mock.assert_called_with(TASK_ID)
+    mock.assert_called_with(task_id=TASK_ID)
     assert len(logs) == expected_log_len
diff --git a/tests/unit/orkes/test_workflow_client.py b/tests/unit/orkes/test_workflow_client.py
index fc882f711..c1ed50bc2 100644
--- a/tests/unit/orkes/test_workflow_client.py
+++ b/tests/unit/orkes/test_workflow_client.py
@@ -41,7 +41,7 @@ def workflow_input():
 
 def test_init(workflow_client):
     message = "workflowResourceApi is not of type WorkflowResourceApi"
-    assert isinstance(workflow_client.workflowResourceApi, WorkflowResourceApi), message
+    assert isinstance(workflow_client._workflow_api, WorkflowResourceApi), message
 
 
 def test_start_workflow_by_name(mocker, workflow_client, workflow_input):
@@ -93,7 +93,7 @@ def test_start_workflow(mocker, workflow_client):
     mock.return_value = WORKFLOW_UUID
     start_workflow_req = StartWorkflowRequest()
     wf_id = workflow_client.start_workflow(start_workflow_req)
-    mock.assert_called_with(start_workflow_req)
+    mock.assert_called_with(body=start_workflow_req)
     assert wf_id == WORKFLOW_UUID
 
 
@@ -121,64 +121,64 @@ def test_execute_workflow(mocker, workflow_client):
 def test_pause_workflow(mocker, workflow_client):
     mock = mocker.patch.object(WorkflowResourceApi, "pause_workflow")
     workflow_client.pause_workflow(WORKFLOW_UUID)
-    mock.assert_called_with(WORKFLOW_UUID)
+    mock.assert_called_with(workflow_id=WORKFLOW_UUID)
 
 
 def test_resume_workflow(mocker, workflow_client):
     mock = mocker.patch.object(WorkflowResourceApi, "resume_workflow")
     workflow_client.resume_workflow(WORKFLOW_UUID)
-    mock.assert_called_with(WORKFLOW_UUID)
+    mock.assert_called_with(workflow_id=WORKFLOW_UUID)
 
 
 def test_restart_workflow(mocker, workflow_client):
     mock = mocker.patch.object(WorkflowResourceApi, "restart")
     workflow_client.restart_workflow(WORKFLOW_UUID)
-    mock.assert_called_with(WORKFLOW_UUID)
+    mock.assert_called_with(workflow_id=WORKFLOW_UUID)
 
 
 def test_restart_workflow_with_latest_wf_def(mocker, workflow_client):
     mock = mocker.patch.object(WorkflowResourceApi, "restart")
     workflow_client.restart_workflow(WORKFLOW_UUID, True)
-    mock.assert_called_with(WORKFLOW_UUID, use_latest_definitions=True)
+    mock.assert_called_with(workflow_id=WORKFLOW_UUID, use_latest_definitions=True)
 
 
 def test_rerun_workflow(mocker, workflow_client):
     mock = mocker.patch.object(WorkflowResourceApi, "rerun")
     re_run_req = RerunWorkflowRequest()
     workflow_client.rerun_workflow(WORKFLOW_UUID, re_run_req)
-    mock.assert_called_with(re_run_req, WORKFLOW_UUID)
+    mock.assert_called_with(body=re_run_req, workflow_id=WORKFLOW_UUID)
 
 
 def test_retry_workflow(mocker, workflow_client):
     mock = mocker.patch.object(WorkflowResourceApi, "retry")
     workflow_client.retry_workflow(WORKFLOW_UUID)
-    mock.assert_called_with(WORKFLOW_UUID)
+    mock.assert_called_with(workflow_id=WORKFLOW_UUID)
 
 
 def test_retry_workflow_with_resume_subworkflow_tasks(mocker, workflow_client):
     mock = mocker.patch.object(WorkflowResourceApi, "retry")
     workflow_client.retry_workflow(WORKFLOW_UUID, True)
-    mock.assert_called_with(WORKFLOW_UUID, resume_subworkflow_tasks=True)
+    mock.assert_called_with(workflow_id=WORKFLOW_UUID, resume_subworkflow_tasks=True)
 
 
 def test_terminate_workflow(mocker, workflow_client):
     mock = mocker.patch.object(WorkflowResourceApi, "terminate1")
     workflow_client.terminate_workflow(WORKFLOW_UUID)
-    mock.assert_called_with(WORKFLOW_UUID)
+    mock.assert_called_with(workflow_id=WORKFLOW_UUID)
 
 
 def test_terminate_workflow_with_reason(mocker, workflow_client):
     mock = mocker.patch.object(WorkflowResourceApi, "terminate1")
     reason = "Unit test failed"
     workflow_client.terminate_workflow(WORKFLOW_UUID, reason)
-    mock.assert_called_with(WORKFLOW_UUID, reason=reason)
+    mock.assert_called_with(workflow_id=WORKFLOW_UUID, reason=reason)
 
 
 def test_get_workflow(mocker, workflow_client):
     mock = mocker.patch.object(WorkflowResourceApi, "get_execution_status")
     mock.return_value = Workflow(workflow_id=WORKFLOW_UUID)
     workflow = workflow_client.get_workflow(WORKFLOW_UUID)
-    mock.assert_called_with(WORKFLOW_UUID, include_tasks=True)
+    mock.assert_called_with(workflow_id=WORKFLOW_UUID, include_tasks=True)
     assert workflow.workflow_id == WORKFLOW_UUID
 
 
@@ -186,7 +186,7 @@ def test_get_workflow_without_tasks(mocker, workflow_client):
     mock = mocker.patch.object(WorkflowResourceApi, "get_execution_status")
     mock.return_value = Workflow(workflow_id=WORKFLOW_UUID)
     workflow = workflow_client.get_workflow(WORKFLOW_UUID, False)
-    mock.assert_called_with(WORKFLOW_UUID)
+    mock.assert_called_with(workflow_id=WORKFLOW_UUID)
     assert workflow.workflow_id == WORKFLOW_UUID
 
 
@@ -203,13 +203,13 @@ def test_get_workflow_non_existent(mocker, workflow_client):
 def test_delete_workflow(mocker, workflow_client):
     mock = mocker.patch.object(WorkflowResourceApi, "delete1")
     workflow_client.delete_workflow(WORKFLOW_UUID)
-    mock.assert_called_with(WORKFLOW_UUID, archive_workflow=True)
+    mock.assert_called_with(workflow_id=WORKFLOW_UUID, archive_workflow=True)
 
 
 def test_delete_workflow_without_archival(mocker, workflow_client):
     mock = mocker.patch.object(WorkflowResourceApi, "delete1")
     workflow_client.delete_workflow(WORKFLOW_UUID, False)
-    mock.assert_called_with(WORKFLOW_UUID, archive_workflow=False)
+    mock.assert_called_with(workflow_id=WORKFLOW_UUID, archive_workflow=False)
 
 
 def test_skip_task_from_workflow(mocker, workflow_client):
@@ -217,7 +217,7 @@ def test_skip_task_from_workflow(mocker, workflow_client):
     task_ref_name = TASK_NAME + "_ref"
     request = SkipTaskRequest()
    workflow_client.skip_task_from_workflow(WORKFLOW_UUID, task_ref_name, request)
-    mock.assert_called_with(WORKFLOW_UUID, task_ref_name, request)
+    mock.assert_called_with(workflow_id=WORKFLOW_UUID, task_reference_name=task_ref_name, body=request)
 
 
 def test_test_workflow(mocker, workflow_client):
@@ -227,5 +227,5 @@ def test_test_workflow(mocker, workflow_client):
         workflow_def=WorkflowDef(name=WORKFLOW_NAME, version=1), name=WORKFLOW_NAME
     )
     workflow = workflow_client.test_workflow(test_request)
-    mock.assert_called_with(test_request)
+    mock.assert_called_with(body=test_request)
     assert workflow.workflow_id == WORKFLOW_UUID