From 8b3e3a2a95b5186cb283763deb9640be73ceac23 Mon Sep 17 00:00:00 2001 From: Edouard Bonlieu Date: Wed, 29 Oct 2025 20:56:30 +0100 Subject: [PATCH 01/47] feat: bootstrap sandbox module --- bumpver.toml | 1 + examples/01_create_sandbox.py | 42 +++ examples/02_basic_commands.py | 51 +++ examples/03_streaming_output.py | 58 ++++ examples/04_environment_variables.py | 45 +++ examples/05_working_directory.py | 49 +++ examples/06_file_operations.py | 50 +++ examples/07_directory_operations.py | 57 ++++ examples/08_binary_files.py | 48 +++ examples/09_batch_operations.py | 61 ++++ examples/10_upload_download.py | 64 ++++ examples/11_file_manipulation.py | 61 ++++ examples/README.md | 58 ++++ koyeb/__init__.py | 5 + koyeb/sandbox/__init__.py | 26 ++ koyeb/sandbox/exec.py | 409 ++++++++++++++++++++++++ koyeb/sandbox/filesystem.py | 447 +++++++++++++++++++++++++++ koyeb/sandbox/sandbox.py | 248 +++++++++++++++ koyeb/sandbox/utils.py | 159 ++++++++++ pyproject.toml | 2 + uv.lock | 95 +++++- 21 files changed, 2035 insertions(+), 1 deletion(-) create mode 100644 examples/01_create_sandbox.py create mode 100644 examples/02_basic_commands.py create mode 100644 examples/03_streaming_output.py create mode 100644 examples/04_environment_variables.py create mode 100644 examples/05_working_directory.py create mode 100644 examples/06_file_operations.py create mode 100644 examples/07_directory_operations.py create mode 100644 examples/08_binary_files.py create mode 100644 examples/09_batch_operations.py create mode 100644 examples/10_upload_download.py create mode 100644 examples/11_file_manipulation.py create mode 100644 examples/README.md create mode 100644 koyeb/sandbox/__init__.py create mode 100644 koyeb/sandbox/exec.py create mode 100644 koyeb/sandbox/filesystem.py create mode 100644 koyeb/sandbox/sandbox.py create mode 100644 koyeb/sandbox/utils.py diff --git a/bumpver.toml b/bumpver.toml index 56db0e16..cb66d24a 100644 --- a/bumpver.toml +++ b/bumpver.toml @@ -13,3 +13,4 @@ push 
= false "koyeb/api/__init__.py" = ['__version__ = "{version}"'] "koyeb/api/api_client.py" = ['OpenAPI-Generator/{version}'] "koyeb/api/configuration.py" = ['SDK Package Version: {version}'] +"koyeb/sandbox/__init__.py" = ['__version__ = "{version}"'] diff --git a/examples/01_create_sandbox.py b/examples/01_create_sandbox.py new file mode 100644 index 00000000..bc4f7e35 --- /dev/null +++ b/examples/01_create_sandbox.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python3 +"""Create and manage a sandbox""" + +import asyncio +import os + +from koyeb import Sandbox + + +async def main(): + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + print("Error: KOYEB_API_TOKEN not set") + return + + sandbox = None + try: + sandbox = await Sandbox.create( + image="python:3.11", + name="example-sandbox", + wait_ready=True, + api_token=api_token, + ) + + # Check status + status = await sandbox.status() + is_healthy = await sandbox.is_healthy() + print(f"Status: {status}, Healthy: {is_healthy}") + + # Test command + result = await sandbox.exec("echo 'Sandbox is ready!'") + print(result.stdout.strip()) + + except Exception as e: + print(f"Error: {e}") + finally: + if sandbox: + await sandbox.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/02_basic_commands.py b/examples/02_basic_commands.py new file mode 100644 index 00000000..9ff12565 --- /dev/null +++ b/examples/02_basic_commands.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python3 +"""Basic command execution""" + +import asyncio +import os + +from koyeb import Sandbox + + +async def main(): + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + print("Error: KOYEB_API_TOKEN not set") + return + + sandbox = None + try: + sandbox = await Sandbox.create( + image="python:3.11", + name="basic-commands", + wait_ready=True, + api_token=api_token, + ) + + # Simple command + result = await sandbox.exec("echo 'Hello World'") + print(result.stdout.strip()) + + # Python command + result = await 
sandbox.exec("python3 -c 'print(2 + 2)'") + print(result.stdout.strip()) + + # Multi-line Python script + result = await sandbox.exec( + '''python3 -c " +import sys +print(f'Python version: {sys.version.split()[0]}') +print(f'Platform: {sys.platform}') +"''' + ) + print(result.stdout.strip()) + + except Exception as e: + print(f"Error: {e}") + finally: + if sandbox: + await sandbox.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/03_streaming_output.py b/examples/03_streaming_output.py new file mode 100644 index 00000000..12c31b33 --- /dev/null +++ b/examples/03_streaming_output.py @@ -0,0 +1,58 @@ +#!/usr/bin/env python3 +"""Streaming command output""" + +import asyncio +import os + +from koyeb import Sandbox + + +async def main(): + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + print("Error: KOYEB_API_TOKEN not set") + return + + sandbox = None + try: + sandbox = await Sandbox.create( + image="python:3.11", + name="streaming", + wait_ready=True, + api_token=api_token, + ) + + # Stream output in real-time + result = await sandbox.exec( + '''python3 -c " +import time +for i in range(5): + print(f'Line {i+1}') + time.sleep(0.5) +"''', + on_stdout=lambda data: print(data.strip(), end=" "), + on_stderr=lambda data: print(f"ERR: {data.strip()}"), + ) + print(f"\nExit code: {result.exit_code}") + + # Stream a script + await sandbox.filesystem.write_file( + "/tmp/counter.py", + "#!/usr/bin/env python3\nimport time\nfor i in range(1, 6):\n print(f'Count: {i}')\n time.sleep(0.3)\nprint('Done!')\n", + ) + await sandbox.exec("chmod +x /tmp/counter.py") + + result = await sandbox.exec( + "python3 /tmp/counter.py", + on_stdout=lambda data: print(data.strip()), + ) + + except Exception as e: + print(f"Error: {e}") + finally: + if sandbox: + await sandbox.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/04_environment_variables.py b/examples/04_environment_variables.py new file mode 100644 
index 00000000..bc8bb431 --- /dev/null +++ b/examples/04_environment_variables.py @@ -0,0 +1,45 @@ +#!/usr/bin/env python3 +"""Environment variables in commands""" + +import asyncio +import os + +from koyeb import Sandbox + + +async def main(): + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + print("Error: KOYEB_API_TOKEN not set") + return + + sandbox = None + try: + sandbox = await Sandbox.create( + image="python:3.11", + name="env-vars", + wait_ready=True, + api_token=api_token, + ) + + # Set environment variables + env_vars = {"MY_VAR": "Hello", "DEBUG": "true"} + result = await sandbox.exec("env | grep MY_VAR", env=env_vars) + print(result.stdout.strip()) + + # Use in Python command + result = await sandbox.exec( + 'python3 -c "import os; print(os.getenv(\'MY_VAR\'))"', + env={"MY_VAR": "Hello from Python!"}, + ) + print(result.stdout.strip()) + + except Exception as e: + print(f"Error: {e}") + finally: + if sandbox: + await sandbox.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/05_working_directory.py b/examples/05_working_directory.py new file mode 100644 index 00000000..bbfceb5f --- /dev/null +++ b/examples/05_working_directory.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python3 +"""Working directory for commands""" + +import asyncio +import os + +from koyeb import Sandbox + + +async def main(): + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + print("Error: KOYEB_API_TOKEN not set") + return + + sandbox = None + try: + sandbox = await Sandbox.create( + image="python:3.11", + name="working-dir", + wait_ready=True, + api_token=api_token, + ) + + # Setup: create directory structure + await sandbox.exec("mkdir -p /tmp/my_project/src") + await sandbox.exec("echo 'print(\\\"hello\\\")' > /tmp/my_project/src/main.py") + + # Run command in specific directory + result = await sandbox.exec("pwd", cwd="/tmp/my_project") + print(result.stdout.strip()) + + # List files in working directory + result = await 
sandbox.exec("ls -la", cwd="/tmp/my_project") + print(result.stdout.strip()) + + # Use relative paths + result = await sandbox.exec("cat src/main.py", cwd="/tmp/my_project") + print(result.stdout.strip()) + + except Exception as e: + print(f"Error: {e}") + finally: + if sandbox: + await sandbox.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/06_file_operations.py b/examples/06_file_operations.py new file mode 100644 index 00000000..004875d0 --- /dev/null +++ b/examples/06_file_operations.py @@ -0,0 +1,50 @@ +#!/usr/bin/env python3 +"""Basic file operations""" + +import asyncio +import os + +from koyeb import Sandbox + + +async def main(): + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + print("Error: KOYEB_API_TOKEN not set") + return + + sandbox = None + try: + sandbox = await Sandbox.create( + image="python:3.11", + name="file-ops", + wait_ready=True, + api_token=api_token, + ) + + fs = sandbox.filesystem + + # Write file + content = "Hello, Koyeb Sandbox!\nThis is a test file." 
+ await fs.write_file("/tmp/hello.txt", content) + + # Read file + file_info = await fs.read_file("/tmp/hello.txt") + print(file_info.content) + + # Write Python script + python_code = "#!/usr/bin/env python3\nprint('Hello from Python!')\n" + await fs.write_file("/tmp/script.py", python_code) + await sandbox.exec("chmod +x /tmp/script.py") + result = await sandbox.exec("/tmp/script.py") + print(result.stdout.strip()) + + except Exception as e: + print(f"Error: {e}") + finally: + if sandbox: + await sandbox.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/07_directory_operations.py b/examples/07_directory_operations.py new file mode 100644 index 00000000..9d2d14a6 --- /dev/null +++ b/examples/07_directory_operations.py @@ -0,0 +1,57 @@ +#!/usr/bin/env python3 +"""Directory operations""" + +import asyncio +import os + +from koyeb import Sandbox + + +async def main(): + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + print("Error: KOYEB_API_TOKEN not set") + return + + sandbox = None + try: + sandbox = await Sandbox.create( + image="python:3.11", + name="directory-ops", + wait_ready=True, + api_token=api_token, + ) + + fs = sandbox.filesystem + + # Create directory + await fs.mkdir("/tmp/my_project") + + # Create nested directories + await fs.mkdir("/tmp/my_project/src/utils", recursive=True) + + # List directory + contents = await fs.list_dir("/tmp/my_project") + print(f"Contents: {contents}") + + # Create project structure + await fs.mkdir("/tmp/my_project/src", recursive=True) + await fs.mkdir("/tmp/my_project/tests", recursive=True) + await fs.write_file("/tmp/my_project/src/main.py", "print('Hello')") + await fs.write_file("/tmp/my_project/README.md", "# My Project") + + # Check if path exists + exists = await fs.exists("/tmp/my_project") + is_dir = await fs.is_dir("/tmp/my_project") + is_file = await fs.is_file("/tmp/my_project/src/main.py") + print(f"Exists: {exists}, Is dir: {is_dir}, Is file: {is_file}") + + 
except Exception as e: + print(f"Error: {e}") + finally: + if sandbox: + await sandbox.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/08_binary_files.py b/examples/08_binary_files.py new file mode 100644 index 00000000..10eb5964 --- /dev/null +++ b/examples/08_binary_files.py @@ -0,0 +1,48 @@ +#!/usr/bin/env python3 +"""Binary file operations""" + +import asyncio +import base64 +import os + +from koyeb import Sandbox + + +async def main(): + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + print("Error: KOYEB_API_TOKEN not set") + return + + sandbox = None + try: + sandbox = await Sandbox.create( + image="python:3.11", + name="binary-files", + wait_ready=True, + api_token=api_token, + ) + + fs = sandbox.filesystem + + # Write binary data + binary_data = b"Binary data: \x00\x01\x02\x03\xff\xfe\xfd" + base64_data = base64.b64encode(binary_data).decode("utf-8") + await fs.write_file("/tmp/binary.bin", base64_data, encoding="base64") + + # Read binary data + file_info = await fs.read_file("/tmp/binary.bin", encoding="base64") + decoded = base64.b64decode(file_info.content) + print(f"Original: {binary_data}") + print(f"Decoded: {decoded}") + assert binary_data == decoded + + except Exception as e: + print(f"Error: {e}") + finally: + if sandbox: + await sandbox.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/09_batch_operations.py b/examples/09_batch_operations.py new file mode 100644 index 00000000..8e43f42c --- /dev/null +++ b/examples/09_batch_operations.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python3 +"""Batch file operations""" + +import asyncio +import os + +from koyeb import Sandbox + + +async def main(): + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + print("Error: KOYEB_API_TOKEN not set") + return + + sandbox = None + try: + sandbox = await Sandbox.create( + image="python:3.11", + name="batch-ops", + wait_ready=True, + api_token=api_token, + ) + + fs = 
sandbox.filesystem + + # Write multiple files at once + files_to_create = [ + {"path": "/tmp/file1.txt", "content": "Content of file 1"}, + {"path": "/tmp/file2.txt", "content": "Content of file 2"}, + {"path": "/tmp/file3.txt", "content": "Content of file 3"}, + ] + + await fs.write_files(files_to_create) + print("Created 3 files") + + # Verify + created_files = await fs.ls("/tmp") + batch_files = [f for f in created_files if f.startswith("file")] + print(f"Files: {batch_files}") + + # Create project structure + project_files = [ + {"path": "/tmp/project/main.py", "content": "print('Hello')"}, + {"path": "/tmp/project/utils.py", "content": "def helper(): pass"}, + {"path": "/tmp/project/README.md", "content": "# My Project"}, + ] + + await fs.mkdir("/tmp/project", recursive=True) + await fs.write_files(project_files) + print("Created project structure") + + except Exception as e: + print(f"Error: {e}") + finally: + if sandbox: + await sandbox.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/10_upload_download.py b/examples/10_upload_download.py new file mode 100644 index 00000000..f7d57553 --- /dev/null +++ b/examples/10_upload_download.py @@ -0,0 +1,64 @@ +#!/usr/bin/env python3 +"""Upload and download files""" + +import asyncio +import os +import tempfile + +from koyeb import Sandbox + + +async def main(): + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + print("Error: KOYEB_API_TOKEN not set") + return + + sandbox = None + try: + sandbox = await Sandbox.create( + image="python:3.11", + name="upload-download", + wait_ready=True, + api_token=api_token, + ) + + fs = sandbox.filesystem + + # Upload local file to sandbox + with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".txt") as f: + f.write("This is a local file\n") + f.write("Uploaded to Koyeb Sandbox!") + local_file = f.name + + try: + await fs.upload_file(local_file, "/tmp/uploaded_file.txt") + uploaded_info = await 
fs.read_file("/tmp/uploaded_file.txt") + print(uploaded_info.content) + finally: + os.unlink(local_file) + + # Download file from sandbox + await fs.write_file( + "/tmp/download_source.txt", "Download test content\nMultiple lines" + ) + + with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix="_downloaded.txt") as f: + download_path = f.name + + try: + await fs.download_file("/tmp/download_source.txt", download_path) + with open(download_path, "r") as f: + print(f.read()) + finally: + os.unlink(download_path) + + except Exception as e: + print(f"Error: {e}") + finally: + if sandbox: + await sandbox.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/11_file_manipulation.py b/examples/11_file_manipulation.py new file mode 100644 index 00000000..8177a3ab --- /dev/null +++ b/examples/11_file_manipulation.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python3 +"""File manipulation operations""" + +import asyncio +import os + +from koyeb import Sandbox + + +async def main(): + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + print("Error: KOYEB_API_TOKEN not set") + return + + sandbox = None + try: + sandbox = await Sandbox.create( + image="python:3.11", + name="file-manip", + wait_ready=True, + api_token=api_token, + ) + + fs = sandbox.filesystem + + # Setup + await fs.write_file("/tmp/file1.txt", "Content of file 1") + await fs.write_file("/tmp/file2.txt", "Content of file 2") + await fs.mkdir("/tmp/test_dir", recursive=True) + + # Rename file + await fs.rename_file("/tmp/file1.txt", "/tmp/renamed_file.txt") + print(f"Renamed: {await fs.exists('/tmp/renamed_file.txt')}") + + # Move file + await fs.move_file("/tmp/file2.txt", "/tmp/test_dir/moved_file.txt") + print(f"Moved: {await fs.exists('/tmp/test_dir/moved_file.txt')}") + + # Copy file (read + write) + original_content = await fs.read_file("/tmp/renamed_file.txt") + await fs.write_file("/tmp/test_dir/copied_file.txt", original_content.content) + print(f"Copied: {await 
fs.exists('/tmp/test_dir/copied_file.txt')}") + + # Delete file + await fs.rm("/tmp/renamed_file.txt") + print(f"Deleted: {not await fs.exists('/tmp/renamed_file.txt')}") + + # Delete directory + await fs.rm("/tmp/test_dir", recursive=True) + print(f"Directory deleted: {not await fs.exists('/tmp/test_dir')}") + + except Exception as e: + print(f"Error: {e}") + finally: + if sandbox: + await sandbox.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/README.md b/examples/README.md new file mode 100644 index 00000000..a4067b5a --- /dev/null +++ b/examples/README.md @@ -0,0 +1,58 @@ +# Koyeb Sandbox Examples + +A collection of examples demonstrating the Koyeb Sandbox SDK capabilities. + +## Quick Start + +```bash +# Set your API token +export KOYEB_API_TOKEN=your_api_token_here + +# Run individual examples +uv run python examples/01_create_sandbox.py +``` + +## Examples + +- **01_create_sandbox.py** - Create and manage sandbox instances +- **02_basic_commands.py** - Basic command execution +- **03_streaming_output.py** - Real-time streaming output +- **04_environment_variables.py** - Environment variable configuration +- **05_working_directory.py** - Working directory management +- **06_file_operations.py** - File read/write operations +- **07_directory_operations.py** - Directory management +- **08_binary_files.py** - Binary file handling +- **09_batch_operations.py** - Batch file operations +- **10_upload_download.py** - File upload and download +- **11_file_manipulation.py** - File manipulation operations + +## Basic Usage + +```python +from koyeb import Sandbox + +# Create a sandbox +sandbox = await Sandbox.create( + image="python:3.11", + name="my-sandbox", + wait_ready=True, + api_token=api_token, +) + +# Execute commands +result = await sandbox.exec("echo 'Hello World'") +print(result.stdout) + +# Use filesystem +await sandbox.filesystem.write_file("/tmp/file.txt", "Hello!") +content = await 
sandbox.filesystem.read_file("/tmp/file.txt") + +# Cleanup +await sandbox.delete() +``` + +## Prerequisites + +- Koyeb API token from https://app.koyeb.com/account/api +- Python 3.11+ +- `uv` package manager (or `pip`) diff --git a/koyeb/__init__.py b/koyeb/__init__.py index dba7c501..1e21041c 100644 --- a/koyeb/__init__.py +++ b/koyeb/__init__.py @@ -1,3 +1,8 @@ # coding: utf-8 __version__ = "1.0.3" + +# Make Sandbox available at package level +from .sandbox import Sandbox + +__all__ = ["Sandbox"] diff --git a/koyeb/sandbox/__init__.py b/koyeb/sandbox/__init__.py new file mode 100644 index 00000000..c0057c98 --- /dev/null +++ b/koyeb/sandbox/__init__.py @@ -0,0 +1,26 @@ +# coding: utf-8 + +""" +Koyeb Sandbox - Interactive execution environment for running arbitrary code on Koyeb +""" + +__version__ = "1.0.3" + +from koyeb.api.models.instance_status import InstanceStatus as SandboxStatus + +from .exec import CommandResult, CommandStatus, SandboxCommandError, SandboxExecutor +from .filesystem import FileInfo, SandboxFilesystem +from .sandbox import Sandbox +from .utils import SandboxError + +__all__ = [ + "Sandbox", + "SandboxFilesystem", + "SandboxExecutor", + "FileInfo", + "SandboxStatus", + "SandboxError", + "CommandResult", + "CommandStatus", + "SandboxCommandError", +] diff --git a/koyeb/sandbox/exec.py b/koyeb/sandbox/exec.py new file mode 100644 index 00000000..e53c5f82 --- /dev/null +++ b/koyeb/sandbox/exec.py @@ -0,0 +1,409 @@ +# coding: utf-8 + +""" +Command execution utilities for Koyeb Sandbox instances +Using WebSocket connection to Koyeb API +""" + +import asyncio +import base64 +import json +import shlex +import time +from dataclasses import dataclass +from enum import Enum +from typing import Callable, Dict, List, Optional, Union + +import websockets + +from koyeb.api.models.stream_result_of_exec_command_reply import ( + StreamResultOfExecCommandReply, +) + +from .utils import SandboxError, get_api_client + + +class CommandStatus(str, Enum):
"""Command execution status""" + + RUNNING = "running" + FINISHED = "finished" + FAILED = "failed" + + +@dataclass +class CommandResult: + """Result of a command execution using Koyeb API models""" + + stdout: str = "" + stderr: str = "" + exit_code: int = 0 + status: CommandStatus = CommandStatus.FINISHED + duration: float = 0.0 + command: str = "" + args: Optional[List[str]] = None + + def __post_init__(self): + if self.args is None: + self.args = [] + + @property + def success(self) -> bool: + """Check if command executed successfully""" + return self.exit_code == 0 and self.status == CommandStatus.FINISHED + + @property + def output(self) -> str: + """Get combined stdout and stderr output""" + return self.stdout + (f"\n{self.stderr}" if self.stderr else "") + + +class SandboxCommandError(SandboxError): + """Raised when command execution fails""" + + +class SandboxExecutor: + """ + Command execution interface for Koyeb Sandbox instances. + Bound to a specific sandbox instance. + """ + + def __init__(self, sandbox): + self.sandbox = sandbox + + async def __call__( + self, + command: str, + cwd: Optional[str] = None, + env: Optional[Dict[str, str]] = None, + timeout: int = 30, + on_stdout: Optional[Callable[[str], None]] = None, + on_stderr: Optional[Callable[[str], None]] = None, + ) -> CommandResult: + """ + Execute a command in a shell. Supports streaming output via callbacks. 
+ + Args: + command: Command to execute as a string (e.g., "python -c 'print(2+2)'") + cwd: Working directory for the command + env: Environment variables for the command + timeout: Command timeout in seconds + on_stdout: Optional callback for streaming stdout chunks + on_stderr: Optional callback for streaming stderr chunks + + Returns: + CommandResult: Result of the command execution + + Example: + ```python + # Without streaming + result = await sandbox.exec("echo hello") + + # With streaming + result = await sandbox.exec( + "echo hello; sleep 1; echo world", + on_stdout=lambda data: print(f"OUT: {data}"), + on_stderr=lambda data: print(f"ERR: {data}"), + ) + ``` + """ + return await _exec_async( + instance_id=self.sandbox.instance_id, + command=command, + cwd=cwd, + env=env, + timeout=timeout, + api_token=self.sandbox.api_token, + on_stdout=on_stdout, + on_stderr=on_stderr, + ) + + +def _normalize_command(command: Union[str, List[str]], *args: str) -> str: + """Normalize command to a string, handling both string and list formats""" + if isinstance(command, str): + if args: + # Join command and args with proper quoting + return ( + shlex.quote(command) + " " + " ".join(shlex.quote(arg) for arg in args) + ) + return command + else: + # List of commands - join them for shell execution + all_args = list(command) + list(args) + return " ".join(shlex.quote(arg) for arg in all_args) + + +def _build_shell_command( + command: Union[str, List[str]], + cwd: Optional[str] = None, + env: Optional[Dict[str, str]] = None, +) -> List[str]: + """Build a shell command with environment variables and working directory""" + # If command is a string, use it as-is + if isinstance(command, str): + shell_cmd = command + else: + # If it's a list, join it as a shell command + shell_cmd = " ".join(shlex.quote(arg) for arg in command) + + # Build the full command with env and cwd + parts = [] + + if cwd: + parts.append(f"cd {shlex.quote(cwd)}") + + if env: + env_vars = [] + for key, value 
in env.items(): + escaped_key = shlex.quote(key) + escaped_value = shlex.quote(value) + env_vars.append(f"{escaped_key}={escaped_value}") + if env_vars: + shell_cmd = " ".join(env_vars) + " " + shell_cmd + + if parts: + shell_cmd = " && ".join(parts) + " && " + shell_cmd + + return ["sh", "-c", shell_cmd] + + +def _decode_base64_content(content: Union[str, bytes]) -> str: + """Safely decode base64 content with proper error handling""" + if isinstance(content, str): + try: + return base64.b64decode(content).decode("utf-8") + except (base64.binascii.Error, UnicodeDecodeError): + # If base64 decoding fails, return as-is (might be plain text) + return content + else: + return content.decode("utf-8") + + +def _process_websocket_message( + message: str, +) -> tuple[Optional[str], Optional[str], Optional[int], Optional[str], bool]: + """Process WebSocket message using SDK models + + Returns: + tuple: (stdout, stderr, exit_code, error, is_finished) + """ + try: + stream_result = StreamResultOfExecCommandReply.from_dict(json.loads(message)) + except (json.JSONDecodeError, ValueError) as e: + return None, None, None, f"Failed to parse WebSocket message: {e}", False + + if stream_result.result: + result = stream_result.result + stdout = "" + stderr = "" + exit_code = None + is_finished = False + + if result.stdout and result.stdout.data: + stdout = _decode_base64_content(result.stdout.data) + + if result.stderr and result.stderr.data: + stderr = _decode_base64_content(result.stderr.data) + + if result.exit_code is not None: + exit_code = result.exit_code + # Only mark as finished if exited flag is explicitly set + # Otherwise, we might get exit_code but still have more output + if hasattr(result, "exited") and result.exited: + is_finished = True + # If exit_code is set but exited is not, don't mark as finished yet + # to allow for more output chunks + + return stdout, stderr, exit_code, None, is_finished + + elif stream_result.error: + error_msg = stream_result.error.message 
or "Unknown error" + return None, None, None, f"API Error: {error_msg}", True + + return None, None, None, None, False + + +def _get_websocket_url_and_headers( + instance_id: str, api_token: Optional[str] = None +) -> tuple[str, Dict[str, str]]: + """ + Get WebSocket URL and headers using SDK API client configuration. + + Args: + instance_id: The instance ID + api_token: API token (if None, will use get_api_client which reads from env) + + Returns: + Tuple of (websocket_url, headers_dict) + """ + _, _, instances_api = get_api_client(api_token) + api_client = instances_api.api_client + config = api_client.configuration + + host = config.host.replace("https://", "wss://").replace("http://", "ws://") + ws_url = f"{host}/v1/streams/instances/exec?id={instance_id}" + + headers = {} + auth_token = config.get_api_key_with_prefix("Bearer") + if auth_token: + headers["Authorization"] = auth_token + + return ws_url, headers + + +async def _execute_websocket_command( + instance_id: str, + command: List[str], + api_token: Optional[str] = None, + input_data: Optional[str] = None, + timeout: int = 30, + on_stdout: Optional[Callable[[str], None]] = None, + on_stderr: Optional[Callable[[str], None]] = None, +) -> CommandResult: + """Execute a command via WebSocket with proper timeout handling""" + start_time = time.time() + + ws_url, headers = _get_websocket_url_and_headers(instance_id, api_token) + + _, _, instances_api = get_api_client(api_token) + api_token_for_subprotocol = instances_api.api_client.configuration.api_key.get( + "Bearer" + ) + + try: + async with asyncio.timeout(timeout): + async with websockets.connect( + ws_url, + additional_headers=headers, + subprotocols=( + ["Bearer", api_token_for_subprotocol] + if api_token_for_subprotocol + else None + ), + ) as websocket: + command_frame = { + "id": instance_id, + "body": {"command": command}, + } + await websocket.send(json.dumps(command_frame)) + + if input_data: + input_frame = { + "id": instance_id, + "body": { + 
"stdin": { + "data": base64.b64encode( + input_data.encode("utf-8") + ).decode("utf-8"), + "close": True, + } + }, + } + await websocket.send(json.dumps(input_frame)) + + stdout_data = [] + stderr_data = [] + exit_code = 0 + + async for message in websocket: + stdout, stderr, cmd_exit_code, error, is_finished = ( + _process_websocket_message(message) + ) + + if error: + stderr_data.append(error) + if on_stderr: + on_stderr(error) + if "API Error" in error: + exit_code = 1 + break + continue + + # Process stdout first (may come with exit_code in same message) + if stdout: + stdout_data.append(stdout) + if on_stdout: + on_stdout(stdout) + + # Process stderr first (may come with exit_code in same message) + if stderr: + stderr_data.append(stderr) + if on_stderr: + on_stderr(stderr) + + # Store exit code but don't break yet - there might be more output + if cmd_exit_code is not None: + exit_code = cmd_exit_code + + # Only break when explicitly finished - continue processing until all output is received + if is_finished: + break + # If we have exit code but websocket closes naturally, that's fine too + + return CommandResult( + stdout="".join(stdout_data), + stderr="".join(stderr_data), + exit_code=exit_code, + status=( + CommandStatus.FINISHED + if exit_code == 0 + else CommandStatus.FAILED + ), + duration=time.time() - start_time, + command=command[0] if command else "", + args=command[1:] if len(command) > 1 else [], + ) + + except asyncio.TimeoutError: + return CommandResult( + stdout="", + stderr=f"Command timed out after {timeout} seconds", + exit_code=1, + status=CommandStatus.FAILED, + duration=time.time() - start_time, + command=command[0] if command else "", + args=command[1:] if len(command) > 1 else [], + ) + except Exception as e: + return CommandResult( + stdout="", + stderr=f"Command execution failed: {str(e)}", + exit_code=1, + status=CommandStatus.FAILED, + duration=time.time() - start_time, + command=command[0] if command else "", + args=command[1:] if 
len(command) > 1 else [], + ) + + +async def _exec_async( + instance_id: str, + command: Union[str, List[str]], + *args: str, + cwd: Optional[str] = None, + env: Optional[Dict[str, str]] = None, + timeout: int = 30, + api_token: Optional[str] = None, + on_stdout: Optional[Callable[[str], None]] = None, + on_stderr: Optional[Callable[[str], None]] = None, +) -> CommandResult: + """ + Execute a command in a shell via WebSocket connection to Koyeb API. + + Internal function - use sandbox.exec() for the public API. This function handles + command normalization and delegates to the WebSocket execution handler. + + Supports streaming output via on_stdout/on_stderr callbacks. + """ + full_cmd = _normalize_command(command, *args) + shell_command = _build_shell_command(full_cmd, cwd, env) + + return await _execute_websocket_command( + instance_id=instance_id, + command=shell_command, + api_token=api_token, + timeout=timeout, + on_stdout=on_stdout, + on_stderr=on_stderr, + ) diff --git a/koyeb/sandbox/filesystem.py b/koyeb/sandbox/filesystem.py new file mode 100644 index 00000000..4382d2a8 --- /dev/null +++ b/koyeb/sandbox/filesystem.py @@ -0,0 +1,447 @@ +# coding: utf-8 + +""" +Filesystem operations for Koyeb Sandbox instances +Using only the primitives available in the Koyeb API +""" + +import base64 +import os +import shlex +from dataclasses import dataclass +from typing import Dict, List, Union + +from .exec import _exec_async +from .utils import SandboxError, ensure_sandbox_healthy + + +class SandboxFilesystemError(SandboxError): + """Base exception for filesystem operations""" + + +class FileNotFoundError(SandboxFilesystemError): + """Raised when file or directory not found""" + + +class FileExistsError(SandboxFilesystemError): + """Raised when file already exists""" + + +@dataclass +class FileInfo: + """File information""" + + content: str + encoding: str + + +class SandboxFilesystem: + """ + Filesystem operations for Koyeb Sandbox instances + Using only the 
primitives available in the Koyeb API + """ + + def __init__(self, sandbox): + self.sandbox = sandbox + + async def write_file( + self, path: str, content: Union[str, bytes], encoding: str = "utf-8" + ) -> None: + """ + Write content to a file. + + Args: + path: Absolute path to the file + content: Content to write (string or bytes) + encoding: File encoding (default: "utf-8"). Use "base64" for binary data. + """ + ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) + + escaped_path = shlex.quote(path) + + if isinstance(content, bytes): + content_str = content.decode("utf-8", errors="replace") + else: + content_str = content + + if encoding == "base64": + content_b64 = content_str + else: + content_b64 = base64.b64encode(content_str.encode("utf-8")).decode("utf-8") + + escaped_b64 = shlex.quote(content_b64) + + result = await _exec_async( + instance_id=self.sandbox.instance_id, + command=f"printf '%s' {escaped_b64} | base64 -d > {escaped_path}", + api_token=self.sandbox.api_token, + ) + + if not result.success: + if "Permission denied" in result.stderr: + raise SandboxFilesystemError(f"Permission denied: {path}") + raise SandboxFilesystemError(f"Failed to write file: {result.stderr}") + + async def read_file(self, path: str, encoding: str = "utf-8") -> FileInfo: + """ + Read a file from the sandbox. + + Args: + path: Absolute path to the file + encoding: File encoding (default: "utf-8"). Use "base64" for binary data. 
+ + Returns: + FileInfo: Object with content and encoding + """ + ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) + + escaped_path = shlex.quote(path) + + if encoding == "base64": + result = await _exec_async( + instance_id=self.sandbox.instance_id, + command=f"base64 < {escaped_path}", + api_token=self.sandbox.api_token, + ) + else: + result = await _exec_async( + instance_id=self.sandbox.instance_id, + command=f"cat {escaped_path}", + api_token=self.sandbox.api_token, + ) + + if not result.success: + if "No such file or directory" in result.stderr: + raise FileNotFoundError(f"File not found: {path}") + if "Permission denied" in result.stderr: + raise SandboxFilesystemError(f"Permission denied: {path}") + raise SandboxFilesystemError(f"Failed to read file: {result.stderr}") + + return FileInfo(content=result.stdout.strip(), encoding=encoding) + + async def mkdir(self, path: str, recursive: bool = False) -> None: + """ + Create a directory. + + Args: + path: Absolute path to the directory + recursive: Create parent directories if needed (default: False) + """ + ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) + + if recursive: + result = await _exec_async( + instance_id=self.sandbox.instance_id, + command=["mkdir", "-p", path], + api_token=self.sandbox.api_token, + ) + else: + result = await _exec_async( + instance_id=self.sandbox.instance_id, + command=["mkdir", path], + api_token=self.sandbox.api_token, + ) + + if not result.success: + if "File exists" in result.stderr: + raise FileExistsError(f"Directory already exists: {path}") + if "Permission denied" in result.stderr: + raise SandboxFilesystemError(f"Permission denied: {path}") + raise SandboxFilesystemError(f"Failed to create directory: {result.stderr}") + + async def list_dir(self, path: str = ".") -> List[str]: + """ + List contents of a directory. 
+ + Args: + path: Path to the directory (default: current directory) + + Returns: + List[str]: Names of files and directories within the specified path. + """ + ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) + + result = await _exec_async( + instance_id=self.sandbox.instance_id, + command=["ls", "-A", path], + api_token=self.sandbox.api_token, + ) + + if not result.success: + if "No such file or directory" in result.stderr: + raise FileNotFoundError(f"Directory not found: {path}") + if "Permission denied" in result.stderr: + raise SandboxFilesystemError(f"Permission denied: {path}") + raise SandboxFilesystemError(f"Failed to list directory: {result.stderr}") + + return [item for item in result.stdout.splitlines() if item] + + async def delete_file(self, path: str) -> None: + """ + Delete a file. + + Args: + path: Absolute path to the file + """ + ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) + + result = await _exec_async( + instance_id=self.sandbox.instance_id, + command=["rm", path], + api_token=self.sandbox.api_token, + ) + + if not result.success: + if "No such file or directory" in result.stderr: + raise FileNotFoundError(f"File not found: {path}") + if "Permission denied" in result.stderr: + raise SandboxFilesystemError(f"Permission denied: {path}") + raise SandboxFilesystemError(f"Failed to delete file: {result.stderr}") + + async def delete_dir(self, path: str) -> None: + """ + Delete a directory. 
+ + Args: + path: Absolute path to the directory + """ + ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) + + result = await _exec_async( + instance_id=self.sandbox.instance_id, + command=["rmdir", path], + api_token=self.sandbox.api_token, + ) + + if not result.success: + if "No such file or directory" in result.stderr: + raise FileNotFoundError(f"Directory not found: {path}") + if "Directory not empty" in result.stderr: + raise SandboxFilesystemError(f"Directory not empty: {path}") + if "Permission denied" in result.stderr: + raise SandboxFilesystemError(f"Permission denied: {path}") + raise SandboxFilesystemError(f"Failed to delete directory: {result.stderr}") + + async def rename_file(self, old_path: str, new_path: str) -> None: + """ + Rename a file. + + Args: + old_path: Current file path + new_path: New file path + """ + ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) + + result = await _exec_async( + instance_id=self.sandbox.instance_id, + command=["mv", old_path, new_path], + api_token=self.sandbox.api_token, + ) + + if not result.success: + if "No such file or directory" in result.stderr: + raise FileNotFoundError(f"File not found: {old_path}") + if "Permission denied" in result.stderr: + raise SandboxFilesystemError(f"Permission denied: {old_path}") + raise SandboxFilesystemError(f"Failed to rename file: {result.stderr}") + + async def move_file(self, source_path: str, destination_path: str) -> None: + """ + Move a file to a different directory. 
+ + Args: + source_path: Current file path + destination_path: Destination path + """ + ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) + + result = await _exec_async( + instance_id=self.sandbox.instance_id, + command=["mv", source_path, destination_path], + api_token=self.sandbox.api_token, + ) + + if not result.success: + if "No such file or directory" in result.stderr: + raise FileNotFoundError(f"File not found: {source_path}") + if "Permission denied" in result.stderr: + raise SandboxFilesystemError(f"Permission denied: {source_path}") + raise SandboxFilesystemError(f"Failed to move file: {result.stderr}") + + async def write_files(self, files: List[Dict[str, str]]) -> None: + """ + Write multiple files in a single operation. + + Args: + files: List of dictionaries, each with 'path', 'content', and optional 'encoding'. + """ + for file_info in files: + path = file_info["path"] + content = file_info["content"] + encoding = file_info.get("encoding", "utf-8") + await self.write_file(path, content, encoding) + + async def exists(self, path: str) -> bool: + """Check if file/directory exists""" + ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) + result = await _exec_async( + instance_id=self.sandbox.instance_id, + command=["test", "-e", path], + api_token=self.sandbox.api_token, + ) + return result.success + + async def is_file(self, path: str) -> bool: + """Check if path is a file""" + ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) + result = await _exec_async( + instance_id=self.sandbox.instance_id, + command=["test", "-f", path], + api_token=self.sandbox.api_token, + ) + return result.success + + async def is_dir(self, path: str) -> bool: + """Check if path is a directory""" + ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) + result = await _exec_async( + instance_id=self.sandbox.instance_id, + command=["test", "-d", path], + api_token=self.sandbox.api_token, + ) + 
return result.success + + async def upload_file(self, local_path: str, remote_path: str) -> None: + """ + Upload a local file to the sandbox. + + Args: + local_path: Path to the local file + remote_path: Destination path in the sandbox + """ + ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) + + if not os.path.exists(local_path): + raise FileNotFoundError(f"Local file not found: {local_path}") + + with open(local_path, "rb") as f: + content = base64.b64encode(f.read()).decode("utf-8") + + await self.write_file(remote_path, content, encoding="base64") + + async def download_file(self, remote_path: str, local_path: str) -> None: + """ + Download a file from the sandbox to a local path. + + Args: + remote_path: Path to the file in the sandbox + local_path: Destination path on the local filesystem + """ + ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) + + file_info = await self.read_file(remote_path, encoding="base64") + content = base64.b64decode(file_info.content) + + with open(local_path, "wb") as f: + f.write(content) + + async def ls(self, path: str = ".") -> List[str]: + """ + List directory contents. + + Args: + path: Path to list + + Returns: + List of file/directory names + """ + return await self.list_dir(path) + + async def rm(self, path: str, recursive: bool = False) -> None: + """ + Remove file or directory. 
+ + Args: + path: Path to remove + recursive: Remove recursively + """ + ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) + + if recursive: + result = await _exec_async( + instance_id=self.sandbox.instance_id, + command=["rm", "-rf", path], + api_token=self.sandbox.api_token, + ) + else: + result = await _exec_async( + instance_id=self.sandbox.instance_id, + command=["rm", path], + api_token=self.sandbox.api_token, + ) + + if not result.success: + if "No such file or directory" in result.stderr: + raise FileNotFoundError(f"File not found: {path}") + raise SandboxFilesystemError(f"Failed to remove: {result.stderr}") + + def open(self, path: str, mode: str = "r") -> "SandboxFileIO": + """ + Open a file in the sandbox. + + Args: + path: Path to the file + mode: Open mode ('r', 'w', 'a', etc.) + + Returns: + SandboxFileIO: File handle + """ + return SandboxFileIO(self, path, mode) + + +class SandboxFileIO: + """File I/O handle for sandbox files""" + + def __init__(self, filesystem: SandboxFilesystem, path: str, mode: str): + self.filesystem = filesystem + self.path = path + self.mode = mode + self._closed = False + + async def read(self) -> str: + """Read file content""" + if "r" not in self.mode: + raise ValueError("File not opened for reading") + + if self._closed: + raise ValueError("File is closed") + + file_info = await self.filesystem.read_file(self.path) + return file_info.content + + async def write(self, content: str) -> None: + """Write content to file""" + if "w" not in self.mode and "a" not in self.mode: + raise ValueError("File not opened for writing") + + if self._closed: + raise ValueError("File is closed") + + if "a" in self.mode: + try: + existing = await self.filesystem.read_file(self.path) + content = existing.content + content + except FileNotFoundError: + pass + + await self.filesystem.write_file(self.path, content) + + def close(self) -> None: + """Close the file""" + self._closed = True + + def __enter__(self): + return 
self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() diff --git a/koyeb/sandbox/sandbox.py b/koyeb/sandbox/sandbox.py new file mode 100644 index 00000000..6e62e7b4 --- /dev/null +++ b/koyeb/sandbox/sandbox.py @@ -0,0 +1,248 @@ +# coding: utf-8 + +""" +Async Koyeb Sandbox - Python SDK for creating and managing Koyeb sandboxes asynchronously +""" + +import asyncio +import time +from typing import Dict, List, Optional + +from koyeb.api.models.create_app import CreateApp +from koyeb.api.models.deployment_port import DeploymentPort + +from .utils import ( + build_env_vars, + create_deployment_definition, + create_docker_source, + get_api_client, + is_sandbox_healthy, +) + + +class Sandbox: + """ + Async sandbox for running code on Koyeb infrastructure. + Provides async creation and deletion functionality with proper health polling. + """ + + def __init__( + self, + sandbox_id: str, + app_id: str, + service_id: str, + instance_id: str, + name: Optional[str] = None, + api_token: Optional[str] = None, + ): + self.sandbox_id = sandbox_id + self.app_id = app_id + self.service_id = service_id + self.instance_id = instance_id + self.name = name + self.api_token = api_token + self._created_at = time.time() + + @classmethod + async def create( + cls, + image: str = "docker.io/library/ubuntu:latest", + name: str = "quick-sandbox", + wait_ready: bool = True, + instance_type: str = "nano", + ports: Optional[List[DeploymentPort]] = None, + env: Optional[Dict[str, str]] = None, + regions: Optional[List[str]] = None, + api_token: Optional[str] = None, + timeout: int = 300, + ) -> "Sandbox": + """ + Create a new sandbox instance with async support. 
+ + Args: + image: Docker image to use (default: ubuntu:latest) + name: Name of the sandbox + wait_ready: Wait for sandbox to be ready (default: True) + instance_type: Instance type (default: nano) + ports: List of ports to expose + env: Environment variables + regions: List of regions to deploy to (default: ["na"]) + api_token: Koyeb API token (if None, will try to get from KOYEB_API_TOKEN env var) + timeout: Timeout for sandbox creation in seconds + + Returns: + Sandbox: A new Sandbox instance + """ + if api_token is None: + import os + + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + raise ValueError( + "API token is required. Set KOYEB_API_TOKEN environment variable or pass api_token parameter" + ) + + loop = asyncio.get_running_loop() + sandbox = await loop.run_in_executor( + None, + lambda: cls._create_sync( + name=name, + image=image, + instance_type=instance_type, + ports=ports, + env=env, + regions=regions, + api_token=api_token, + timeout=timeout, + ), + ) + + if wait_ready: + await sandbox.wait_ready(timeout=timeout) + + return sandbox + + @classmethod + def _create_sync( + cls, + name: str, + image: str = "docker.io/library/ubuntu:latest", + instance_type: str = "nano", + ports: Optional[List[DeploymentPort]] = None, + env: Optional[Dict[str, str]] = None, + regions: Optional[List[str]] = None, + api_token: Optional[str] = None, + timeout: int = 300, + ) -> "Sandbox": + """Synchronous creation method""" + apps_api, services_api, _ = get_api_client(api_token) + + app_name = f"sandbox-app-{name}-{int(time.time())}" + app_response = apps_api.create_app(app=CreateApp(name=app_name)) + app_id = app_response.app.id + + env_vars = build_env_vars(env) + docker_source = create_docker_source(image, ["sleep", "infinity"]) + deployment_definition = create_deployment_definition( + name=f"sandbox-service-{name}", + docker_source=docker_source, + env_vars=env_vars, + instance_type=instance_type, + ports=ports, + regions=regions, + ) + + from 
koyeb.api.models.create_service import CreateService + + create_service = CreateService(app_id=app_id, definition=deployment_definition) + service_response = services_api.create_service(service=create_service) + service_id = service_response.service.id + deployment_id = service_response.service.latest_deployment_id + + from koyeb.api.api.deployments_api import DeploymentsApi + + deployments_api = DeploymentsApi(services_api.api_client) + + max_wait = min(timeout // 2, 60) if timeout > 60 else timeout + wait_interval = 0.5 + start_time = time.time() + + while time.time() - start_time < max_wait: + try: + scaling_response = deployments_api.get_deployment_scaling( + id=deployment_id + ) + + if scaling_response.replicas and scaling_response.replicas[0].instances: + instance_id = scaling_response.replicas[0].instances[0].id + break + else: + print( + f"Waiting for instances to be created... (elapsed: {time.time() - start_time:.1f}s)" + ) + time.sleep(wait_interval) + except Exception as e: + print(f"Error getting deployment scaling: {e}") + time.sleep(wait_interval) + else: + raise Exception( + f"No instances found in deployment after {max_wait} seconds" + ) + + sandbox = cls( + sandbox_id=name, + app_id=app_id, + service_id=service_id, + instance_id=instance_id, + name=name, + api_token=api_token, + ) + return sandbox + + async def wait_ready(self, timeout: int = 60, poll_interval: float = 2.0) -> bool: + """ + Wait for sandbox to become ready with proper async polling. 
+ + Args: + timeout: Maximum time to wait in seconds + poll_interval: Time between health checks in seconds + + Returns: + bool: True if sandbox became ready, False if timeout + """ + start_time = time.time() + + while time.time() - start_time < timeout: + loop = asyncio.get_running_loop() + is_healthy = await loop.run_in_executor( + None, is_sandbox_healthy, self.instance_id, self.api_token + ) + + if is_healthy: + return True + + await asyncio.sleep(poll_interval) + + return False + + async def delete(self) -> None: + """Delete the sandbox instance asynchronously.""" + loop = asyncio.get_running_loop() + await loop.run_in_executor(None, self._delete_sync) + + def _delete_sync(self) -> None: + """Synchronous deletion method""" + apps_api, services_api, _ = get_api_client(self.api_token) + services_api.delete_service(self.service_id) + apps_api.delete_app(self.app_id) + + async def status(self) -> str: + """Get current sandbox status asynchronously""" + from .utils import get_sandbox_status + + loop = asyncio.get_running_loop() + status = await loop.run_in_executor( + None, get_sandbox_status, self.instance_id, self.api_token + ) + return status.value + + async def is_healthy(self) -> bool: + """Check if sandbox is healthy and ready for operations asynchronously""" + loop = asyncio.get_running_loop() + return await loop.run_in_executor( + None, is_sandbox_healthy, self.instance_id, self.api_token + ) + + @property + def filesystem(self): + """Get filesystem operations interface""" + from .filesystem import SandboxFilesystem + + return SandboxFilesystem(self) + + @property + def exec(self): + """Get command execution interface""" + from .exec import SandboxExecutor + + return SandboxExecutor(self) diff --git a/koyeb/sandbox/utils.py b/koyeb/sandbox/utils.py new file mode 100644 index 00000000..018a0ee4 --- /dev/null +++ b/koyeb/sandbox/utils.py @@ -0,0 +1,159 @@ +# coding: utf-8 + +""" +Utility functions for Koyeb Sandbox +""" + +import os +from typing import Dict, 
List, Optional + +from koyeb.api import ApiClient, Configuration +from koyeb.api.api import AppsApi, InstancesApi, ServicesApi +from koyeb.api.exceptions import ApiException, NotFoundException +from koyeb.api.models.deployment_definition import DeploymentDefinition +from koyeb.api.models.deployment_definition_type import DeploymentDefinitionType +from koyeb.api.models.deployment_env import DeploymentEnv +from koyeb.api.models.deployment_instance_type import DeploymentInstanceType +from koyeb.api.models.deployment_port import DeploymentPort +from koyeb.api.models.deployment_scaling import DeploymentScaling +from koyeb.api.models.docker_source import DockerSource +from koyeb.api.models.instance_status import InstanceStatus + + +def get_api_client( + api_token: Optional[str] = None, host: str = "https://app.koyeb.com" +) -> tuple[AppsApi, ServicesApi, InstancesApi]: + """ + Get configured API clients for Koyeb operations. + + Args: + api_token: Koyeb API token. If not provided, will try to get from KOYEB_API_TOKEN env var + host: Koyeb API host URL + + Returns: + Tuple of (AppsApi, ServicesApi, InstancesApi) instances + + Raises: + ValueError: If API token is not provided + """ + token = api_token or os.getenv("KOYEB_API_TOKEN") + if not token: + raise ValueError( + "API token is required. Set KOYEB_API_TOKEN environment variable or pass api_token parameter" + ) + + configuration = Configuration(host=host) + configuration.api_key["Bearer"] = token + configuration.api_key_prefix["Bearer"] = "Bearer" + + api_client = ApiClient(configuration) + return AppsApi(api_client), ServicesApi(api_client), InstancesApi(api_client) + + +def build_env_vars(env: Optional[Dict[str, str]]) -> List[DeploymentEnv]: + """ + Build environment variables list from dictionary. 
+ + Args: + env: Dictionary of environment variables + + Returns: + List of DeploymentEnv objects + """ + env_vars = [] + if env: + for key, value in env.items(): + env_vars.append(DeploymentEnv(key=key, value=value)) + return env_vars + + +def create_docker_source(image: str, command_args: List[str]) -> DockerSource: + """ + Create Docker source configuration. + + Args: + image: Docker image name + command_args: Command and arguments to run + + Returns: + DockerSource object + """ + return DockerSource( + image=image, + command=command_args[0] if command_args else None, + args=list(command_args[1:]) if len(command_args) > 1 else None, + ) + + +def create_deployment_definition( + name: str, + docker_source: DockerSource, + env_vars: List[DeploymentEnv], + instance_type: str, + ports: Optional[List[DeploymentPort]] = None, + regions: List[str] = None, +) -> DeploymentDefinition: + """ + Create deployment definition for a sandbox service. + + Args: + name: Service name + docker_source: Docker configuration + env_vars: Environment variables + instance_type: Instance type + ports: List of ports (if provided, type becomes WEB, otherwise WORKER) + regions: List of regions (defaults to North America) + + Returns: + DeploymentDefinition object + """ + if regions is None: + regions = ["na"] + + deployment_type = ( + DeploymentDefinitionType.WEB if ports else DeploymentDefinitionType.WORKER + ) + + return DeploymentDefinition( + name=name, + type=deployment_type, + docker=docker_source, + env=env_vars, + ports=ports, + instance_types=[DeploymentInstanceType(type=instance_type)], + scalings=[DeploymentScaling(min=1, max=1)], + regions=regions, + ) + + +def get_sandbox_status( + instance_id: str, api_token: Optional[str] = None +) -> InstanceStatus: + """Get the current status of a sandbox instance.""" + try: + _, _, instances_api = get_api_client(api_token) + instance_response = instances_api.get_instance(instance_id) + return instance_response.instance.status + except 
(NotFoundException, ApiException, Exception): + return InstanceStatus.ERROR + + +def is_sandbox_healthy(instance_id: str, api_token: Optional[str] = None) -> bool: + """Check if sandbox is healthy and ready for operations.""" + return get_sandbox_status(instance_id, api_token) == InstanceStatus.HEALTHY + + +def ensure_sandbox_healthy(instance_id: str, api_token: Optional[str] = None) -> None: + """Ensure a sandbox instance is healthy, raising an exception if not.""" + status = get_sandbox_status(instance_id, api_token) + + if status == InstanceStatus.ERROR: + raise SandboxError("Sandbox is in error state") + elif status in [InstanceStatus.STOPPING, InstanceStatus.STOPPED]: + raise SandboxError(f"Sandbox is {status.value}, cannot perform operations") + elif status != InstanceStatus.HEALTHY: + raise SandboxError(f"Sandbox is not healthy (status: {status.value})") + + +class SandboxError(Exception): + """Base exception for sandbox operations""" diff --git a/pyproject.toml b/pyproject.toml index 128bd60c..4a083f8d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,6 +13,7 @@ dependencies = [ "python-dateutil (>=2.8.2)", "pydantic (>=2)", "typing-extensions (>=4.7.1)", + "websockets>=15.0.1", ] [project.urls] @@ -39,6 +40,7 @@ build-backend = "setuptools.build_meta" [dependency-groups] dev = [ + "autopep8>=2.3.2", "flake8>=7.3.0", "pydoc-markdown>=4.8.2", "pytest>=8.4.2", diff --git a/uv.lock b/uv.lock index 9fab16bb..d6ddba70 100644 --- a/uv.lock +++ b/uv.lock @@ -15,6 +15,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, ] +[[package]] +name = "autopep8" +version = "2.3.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycodestyle" }, + { name = "tomli", marker = 
"python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/50/d8/30873d2b7b57dee9263e53d142da044c4600a46f2d28374b3e38b023df16/autopep8-2.3.2.tar.gz", hash = "sha256:89440a4f969197b69a995e4ce0661b031f455a9f776d2c5ba3dbd83466931758", size = 92210, upload-time = "2025-01-14T14:46:18.454Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9e/43/53afb8ba17218f19b77c7834128566c5bbb100a0ad9ba2e8e89d089d7079/autopep8-2.3.2-py2.py3-none-any.whl", hash = "sha256:ce8ad498672c845a0c3de2629c15b635ec2b05ef8177a6e7c91c74f3e9b51128", size = 45807, upload-time = "2025-01-14T14:46:15.466Z" }, +] + [[package]] name = "black" version = "23.12.1" @@ -363,17 +376,19 @@ wheels = [ [[package]] name = "koyeb-sdk" -version = "1.0.2" +version = "1.0.3" source = { editable = "." } dependencies = [ { name = "pydantic" }, { name = "python-dateutil" }, { name = "typing-extensions" }, { name = "urllib3" }, + { name = "websockets" }, ] [package.dev-dependencies] dev = [ + { name = "autopep8" }, { name = "flake8" }, { name = "pydoc-markdown" }, { name = "pytest" }, @@ -385,10 +400,12 @@ requires-dist = [ { name = "python-dateutil", specifier = ">=2.8.2" }, { name = "typing-extensions", specifier = ">=4.7.1" }, { name = "urllib3", specifier = ">=2.1.0,<3.0.0" }, + { name = "websockets", specifier = ">=15.0.1" }, ] [package.metadata.requires-dev] dev = [ + { name = "autopep8", specifier = ">=2.3.2" }, { name = "flake8", specifier = ">=7.3.0" }, { name = "pydoc-markdown", specifier = ">=4.8.2" }, { name = "pytest", specifier = ">=8.4.2" }, @@ -1059,6 +1076,82 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/33/e8/e40370e6d74ddba47f002a32919d91310d6074130fe4e17dabcafc15cbf1/watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f", size = 79067, upload-time = "2024-11-01T14:07:11.845Z" }, ] +[[package]] +name = "websockets" +version = "15.0.1" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload-time = "2025-03-05T20:03:41.606Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/da/6462a9f510c0c49837bbc9345aca92d767a56c1fb2939e1579df1e1cdcf7/websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d63efaa0cd96cf0c5fe4d581521d9fa87744540d4bc999ae6e08595a1014b45b", size = 175423, upload-time = "2025-03-05T20:01:35.363Z" }, + { url = "https://files.pythonhosted.org/packages/1c/9f/9d11c1a4eb046a9e106483b9ff69bce7ac880443f00e5ce64261b47b07e7/websockets-15.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac60e3b188ec7574cb761b08d50fcedf9d77f1530352db4eef1707fe9dee7205", size = 173080, upload-time = "2025-03-05T20:01:37.304Z" }, + { url = "https://files.pythonhosted.org/packages/d5/4f/b462242432d93ea45f297b6179c7333dd0402b855a912a04e7fc61c0d71f/websockets-15.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5756779642579d902eed757b21b0164cd6fe338506a8083eb58af5c372e39d9a", size = 173329, upload-time = "2025-03-05T20:01:39.668Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0c/6afa1f4644d7ed50284ac59cc70ef8abd44ccf7d45850d989ea7310538d0/websockets-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdfe3e2a29e4db3659dbd5bbf04560cea53dd9610273917799f1cde46aa725e", size = 182312, upload-time = "2025-03-05T20:01:41.815Z" }, + { url = "https://files.pythonhosted.org/packages/dd/d4/ffc8bd1350b229ca7a4db2a3e1c482cf87cea1baccd0ef3e72bc720caeec/websockets-15.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c2529b320eb9e35af0fa3016c187dffb84a3ecc572bcee7c3ce302bfeba52bf", size = 181319, upload-time = "2025-03-05T20:01:43.967Z" }, + { url = 
"https://files.pythonhosted.org/packages/97/3a/5323a6bb94917af13bbb34009fac01e55c51dfde354f63692bf2533ffbc2/websockets-15.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac1e5c9054fe23226fb11e05a6e630837f074174c4c2f0fe442996112a6de4fb", size = 181631, upload-time = "2025-03-05T20:01:46.104Z" }, + { url = "https://files.pythonhosted.org/packages/a6/cc/1aeb0f7cee59ef065724041bb7ed667b6ab1eeffe5141696cccec2687b66/websockets-15.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5df592cd503496351d6dc14f7cdad49f268d8e618f80dce0cd5a36b93c3fc08d", size = 182016, upload-time = "2025-03-05T20:01:47.603Z" }, + { url = "https://files.pythonhosted.org/packages/79/f9/c86f8f7af208e4161a7f7e02774e9d0a81c632ae76db2ff22549e1718a51/websockets-15.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0a34631031a8f05657e8e90903e656959234f3a04552259458aac0b0f9ae6fd9", size = 181426, upload-time = "2025-03-05T20:01:48.949Z" }, + { url = "https://files.pythonhosted.org/packages/c7/b9/828b0bc6753db905b91df6ae477c0b14a141090df64fb17f8a9d7e3516cf/websockets-15.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3d00075aa65772e7ce9e990cab3ff1de702aa09be3940d1dc88d5abf1ab8a09c", size = 181360, upload-time = "2025-03-05T20:01:50.938Z" }, + { url = "https://files.pythonhosted.org/packages/89/fb/250f5533ec468ba6327055b7d98b9df056fb1ce623b8b6aaafb30b55d02e/websockets-15.0.1-cp310-cp310-win32.whl", hash = "sha256:1234d4ef35db82f5446dca8e35a7da7964d02c127b095e172e54397fb6a6c256", size = 176388, upload-time = "2025-03-05T20:01:52.213Z" }, + { url = "https://files.pythonhosted.org/packages/1c/46/aca7082012768bb98e5608f01658ff3ac8437e563eca41cf068bd5849a5e/websockets-15.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:39c1fec2c11dc8d89bba6b2bf1556af381611a173ac2b511cf7231622058af41", size = 176830, upload-time = "2025-03-05T20:01:53.922Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/32/18fcd5919c293a398db67443acd33fde142f283853076049824fc58e6f75/websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431", size = 175423, upload-time = "2025-03-05T20:01:56.276Z" }, + { url = "https://files.pythonhosted.org/packages/76/70/ba1ad96b07869275ef42e2ce21f07a5b0148936688c2baf7e4a1f60d5058/websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57", size = 173082, upload-time = "2025-03-05T20:01:57.563Z" }, + { url = "https://files.pythonhosted.org/packages/86/f2/10b55821dd40eb696ce4704a87d57774696f9451108cff0d2824c97e0f97/websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905", size = 173330, upload-time = "2025-03-05T20:01:59.063Z" }, + { url = "https://files.pythonhosted.org/packages/a5/90/1c37ae8b8a113d3daf1065222b6af61cc44102da95388ac0018fcb7d93d9/websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562", size = 182878, upload-time = "2025-03-05T20:02:00.305Z" }, + { url = "https://files.pythonhosted.org/packages/8e/8d/96e8e288b2a41dffafb78e8904ea7367ee4f891dafc2ab8d87e2124cb3d3/websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792", size = 181883, upload-time = "2025-03-05T20:02:03.148Z" }, + { url = "https://files.pythonhosted.org/packages/93/1f/5d6dbf551766308f6f50f8baf8e9860be6182911e8106da7a7f73785f4c4/websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413", size = 182252, upload-time = 
"2025-03-05T20:02:05.29Z" }, + { url = "https://files.pythonhosted.org/packages/d4/78/2d4fed9123e6620cbf1706c0de8a1632e1a28e7774d94346d7de1bba2ca3/websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8", size = 182521, upload-time = "2025-03-05T20:02:07.458Z" }, + { url = "https://files.pythonhosted.org/packages/e7/3b/66d4c1b444dd1a9823c4a81f50231b921bab54eee2f69e70319b4e21f1ca/websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3", size = 181958, upload-time = "2025-03-05T20:02:09.842Z" }, + { url = "https://files.pythonhosted.org/packages/08/ff/e9eed2ee5fed6f76fdd6032ca5cd38c57ca9661430bb3d5fb2872dc8703c/websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf", size = 181918, upload-time = "2025-03-05T20:02:11.968Z" }, + { url = "https://files.pythonhosted.org/packages/d8/75/994634a49b7e12532be6a42103597b71098fd25900f7437d6055ed39930a/websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85", size = 176388, upload-time = "2025-03-05T20:02:13.32Z" }, + { url = "https://files.pythonhosted.org/packages/98/93/e36c73f78400a65f5e236cd376713c34182e6663f6889cd45a4a04d8f203/websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065", size = 176828, upload-time = "2025-03-05T20:02:14.585Z" }, + { url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437, upload-time = "2025-03-05T20:02:16.706Z" }, + { url = 
"https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096, upload-time = "2025-03-05T20:02:18.832Z" }, + { url = "https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332, upload-time = "2025-03-05T20:02:20.187Z" }, + { url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152, upload-time = "2025-03-05T20:02:22.286Z" }, + { url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096, upload-time = "2025-03-05T20:02:24.368Z" }, + { url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523, upload-time = "2025-03-05T20:02:25.669Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790, upload-time = 
"2025-03-05T20:02:26.99Z" }, + { url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165, upload-time = "2025-03-05T20:02:30.291Z" }, + { url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160, upload-time = "2025-03-05T20:02:31.634Z" }, + { url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395, upload-time = "2025-03-05T20:02:33.017Z" }, + { url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841, upload-time = "2025-03-05T20:02:34.498Z" }, + { url = "https://files.pythonhosted.org/packages/cb/9f/51f0cf64471a9d2b4d0fc6c534f323b664e7095640c34562f5182e5a7195/websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931", size = 175440, upload-time = "2025-03-05T20:02:36.695Z" }, + { url = "https://files.pythonhosted.org/packages/8a/05/aa116ec9943c718905997412c5989f7ed671bc0188ee2ba89520e8765d7b/websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675", size = 173098, upload-time = "2025-03-05T20:02:37.985Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/0b/33cef55ff24f2d92924923c99926dcce78e7bd922d649467f0eda8368923/websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151", size = 173329, upload-time = "2025-03-05T20:02:39.298Z" }, + { url = "https://files.pythonhosted.org/packages/31/1d/063b25dcc01faa8fada1469bdf769de3768b7044eac9d41f734fd7b6ad6d/websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22", size = 183111, upload-time = "2025-03-05T20:02:40.595Z" }, + { url = "https://files.pythonhosted.org/packages/93/53/9a87ee494a51bf63e4ec9241c1ccc4f7c2f45fff85d5bde2ff74fcb68b9e/websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f", size = 182054, upload-time = "2025-03-05T20:02:41.926Z" }, + { url = "https://files.pythonhosted.org/packages/ff/b2/83a6ddf56cdcbad4e3d841fcc55d6ba7d19aeb89c50f24dd7e859ec0805f/websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8", size = 182496, upload-time = "2025-03-05T20:02:43.304Z" }, + { url = "https://files.pythonhosted.org/packages/98/41/e7038944ed0abf34c45aa4635ba28136f06052e08fc2168520bb8b25149f/websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375", size = 182829, upload-time = "2025-03-05T20:02:48.812Z" }, + { url = "https://files.pythonhosted.org/packages/e0/17/de15b6158680c7623c6ef0db361da965ab25d813ae54fcfeae2e5b9ef910/websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d", size = 182217, upload-time = 
"2025-03-05T20:02:50.14Z" }, + { url = "https://files.pythonhosted.org/packages/33/2b/1f168cb6041853eef0362fb9554c3824367c5560cbdaad89ac40f8c2edfc/websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4", size = 182195, upload-time = "2025-03-05T20:02:51.561Z" }, + { url = "https://files.pythonhosted.org/packages/86/eb/20b6cdf273913d0ad05a6a14aed4b9a85591c18a987a3d47f20fa13dcc47/websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa", size = 176393, upload-time = "2025-03-05T20:02:53.814Z" }, + { url = "https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837, upload-time = "2025-03-05T20:02:55.237Z" }, + { url = "https://files.pythonhosted.org/packages/36/db/3fff0bcbe339a6fa6a3b9e3fbc2bfb321ec2f4cd233692272c5a8d6cf801/websockets-15.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5f4c04ead5aed67c8a1a20491d54cdfba5884507a48dd798ecaf13c74c4489f5", size = 175424, upload-time = "2025-03-05T20:02:56.505Z" }, + { url = "https://files.pythonhosted.org/packages/46/e6/519054c2f477def4165b0ec060ad664ed174e140b0d1cbb9fafa4a54f6db/websockets-15.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abdc0c6c8c648b4805c5eacd131910d2a7f6455dfd3becab248ef108e89ab16a", size = 173077, upload-time = "2025-03-05T20:02:58.37Z" }, + { url = "https://files.pythonhosted.org/packages/1a/21/c0712e382df64c93a0d16449ecbf87b647163485ca1cc3f6cbadb36d2b03/websockets-15.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a625e06551975f4b7ea7102bc43895b90742746797e2e14b70ed61c43a90f09b", size = 173324, upload-time = "2025-03-05T20:02:59.773Z" }, + { url = 
"https://files.pythonhosted.org/packages/1c/cb/51ba82e59b3a664df54beed8ad95517c1b4dc1a913730e7a7db778f21291/websockets-15.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d591f8de75824cbb7acad4e05d2d710484f15f29d4a915092675ad3456f11770", size = 182094, upload-time = "2025-03-05T20:03:01.827Z" }, + { url = "https://files.pythonhosted.org/packages/fb/0f/bf3788c03fec679bcdaef787518dbe60d12fe5615a544a6d4cf82f045193/websockets-15.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47819cea040f31d670cc8d324bb6435c6f133b8c7a19ec3d61634e62f8d8f9eb", size = 181094, upload-time = "2025-03-05T20:03:03.123Z" }, + { url = "https://files.pythonhosted.org/packages/5e/da/9fb8c21edbc719b66763a571afbaf206cb6d3736d28255a46fc2fe20f902/websockets-15.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac017dd64572e5c3bd01939121e4d16cf30e5d7e110a119399cf3133b63ad054", size = 181397, upload-time = "2025-03-05T20:03:04.443Z" }, + { url = "https://files.pythonhosted.org/packages/2e/65/65f379525a2719e91d9d90c38fe8b8bc62bd3c702ac651b7278609b696c4/websockets-15.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4a9fac8e469d04ce6c25bb2610dc535235bd4aa14996b4e6dbebf5e007eba5ee", size = 181794, upload-time = "2025-03-05T20:03:06.708Z" }, + { url = "https://files.pythonhosted.org/packages/d9/26/31ac2d08f8e9304d81a1a7ed2851c0300f636019a57cbaa91342015c72cc/websockets-15.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363c6f671b761efcb30608d24925a382497c12c506b51661883c3e22337265ed", size = 181194, upload-time = "2025-03-05T20:03:08.844Z" }, + { url = "https://files.pythonhosted.org/packages/98/72/1090de20d6c91994cd4b357c3f75a4f25ee231b63e03adea89671cc12a3f/websockets-15.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2034693ad3097d5355bfdacfffcbd3ef5694f9718ab7f29c29689a9eae841880", size = 181164, upload-time = 
"2025-03-05T20:03:10.242Z" }, + { url = "https://files.pythonhosted.org/packages/2d/37/098f2e1c103ae8ed79b0e77f08d83b0ec0b241cf4b7f2f10edd0126472e1/websockets-15.0.1-cp39-cp39-win32.whl", hash = "sha256:3b1ac0d3e594bf121308112697cf4b32be538fb1444468fb0a6ae4feebc83411", size = 176381, upload-time = "2025-03-05T20:03:12.77Z" }, + { url = "https://files.pythonhosted.org/packages/75/8b/a32978a3ab42cebb2ebdd5b05df0696a09f4d436ce69def11893afa301f0/websockets-15.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7643a03db5c95c799b89b31c036d5f27eeb4d259c798e878d6937d71832b1e4", size = 176841, upload-time = "2025-03-05T20:03:14.367Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/d40f779fa16f74d3468357197af8d6ad07e7c5a27ea1ca74ceb38986f77a/websockets-15.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0c9e74d766f2818bb95f84c25be4dea09841ac0f734d1966f415e4edfc4ef1c3", size = 173109, upload-time = "2025-03-05T20:03:17.769Z" }, + { url = "https://files.pythonhosted.org/packages/bc/cd/5b887b8585a593073fd92f7c23ecd3985cd2c3175025a91b0d69b0551372/websockets-15.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1009ee0c7739c08a0cd59de430d6de452a55e42d6b522de7aa15e6f67db0b8e1", size = 173343, upload-time = "2025-03-05T20:03:19.094Z" }, + { url = "https://files.pythonhosted.org/packages/fe/ae/d34f7556890341e900a95acf4886833646306269f899d58ad62f588bf410/websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d1f20b1c7a2fa82367e04982e708723ba0e7b8d43aa643d3dcd404d74f1475", size = 174599, upload-time = "2025-03-05T20:03:21.1Z" }, + { url = "https://files.pythonhosted.org/packages/71/e6/5fd43993a87db364ec60fc1d608273a1a465c0caba69176dd160e197ce42/websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f29d80eb9a9263b8d109135351caf568cc3f80b9928bccde535c235de55c22d9", size = 174207, upload-time = "2025-03-05T20:03:23.221Z" }, + { 
url = "https://files.pythonhosted.org/packages/2b/fb/c492d6daa5ec067c2988ac80c61359ace5c4c674c532985ac5a123436cec/websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b359ed09954d7c18bbc1680f380c7301f92c60bf924171629c5db97febb12f04", size = 174155, upload-time = "2025-03-05T20:03:25.321Z" }, + { url = "https://files.pythonhosted.org/packages/68/a1/dcb68430b1d00b698ae7a7e0194433bce4f07ded185f0ee5fb21e2a2e91e/websockets-15.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cad21560da69f4ce7658ca2cb83138fb4cf695a2ba3e475e0559e05991aa8122", size = 176884, upload-time = "2025-03-05T20:03:27.934Z" }, + { url = "https://files.pythonhosted.org/packages/b7/48/4b67623bac4d79beb3a6bb27b803ba75c1bdedc06bd827e465803690a4b2/websockets-15.0.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7f493881579c90fc262d9cdbaa05a6b54b3811c2f300766748db79f098db9940", size = 173106, upload-time = "2025-03-05T20:03:29.404Z" }, + { url = "https://files.pythonhosted.org/packages/ed/f0/adb07514a49fe5728192764e04295be78859e4a537ab8fcc518a3dbb3281/websockets-15.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:47b099e1f4fbc95b701b6e85768e1fcdaf1630f3cbe4765fa216596f12310e2e", size = 173339, upload-time = "2025-03-05T20:03:30.755Z" }, + { url = "https://files.pythonhosted.org/packages/87/28/bd23c6344b18fb43df40d0700f6d3fffcd7cef14a6995b4f976978b52e62/websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67f2b6de947f8c757db2db9c71527933ad0019737ec374a8a6be9a956786aaf9", size = 174597, upload-time = "2025-03-05T20:03:32.247Z" }, + { url = "https://files.pythonhosted.org/packages/6d/79/ca288495863d0f23a60f546f0905ae8f3ed467ad87f8b6aceb65f4c013e4/websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d08eb4c2b7d6c41da6ca0600c077e93f5adcfd979cd777d747e9ee624556da4b", size = 
174205, upload-time = "2025-03-05T20:03:33.731Z" }, + { url = "https://files.pythonhosted.org/packages/04/e4/120ff3180b0872b1fe6637f6f995bcb009fb5c87d597c1fc21456f50c848/websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b826973a4a2ae47ba357e4e82fa44a463b8f168e1ca775ac64521442b19e87f", size = 174150, upload-time = "2025-03-05T20:03:35.757Z" }, + { url = "https://files.pythonhosted.org/packages/cb/c3/30e2f9c539b8da8b1d76f64012f3b19253271a63413b2d3adb94b143407f/websockets-15.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:21c1fa28a6a7e3cbdc171c694398b6df4744613ce9b36b1a498e816787e28123", size = 176877, upload-time = "2025-03-05T20:03:37.199Z" }, + { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" }, +] + [[package]] name = "wrapt" version = "2.0.0" From 804ab2b56d38df51e583aaaef328fe521ae108db Mon Sep 17 00:00:00 2001 From: Bastien Chatelard Date: Mon, 3 Nov 2025 16:16:40 +0100 Subject: [PATCH 02/47] Allow host url to be defined via env as well --- koyeb/sandbox/utils.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/koyeb/sandbox/utils.py b/koyeb/sandbox/utils.py index 018a0ee4..3ce506f4 100644 --- a/koyeb/sandbox/utils.py +++ b/koyeb/sandbox/utils.py @@ -21,14 +21,14 @@ def get_api_client( - api_token: Optional[str] = None, host: str = "https://app.koyeb.com" + api_token: Optional[str] = None, host: Optional[str] = None ) -> tuple[AppsApi, ServicesApi, InstancesApi]: """ Get configured API clients for Koyeb operations. Args: api_token: Koyeb API token. If not provided, will try to get from KOYEB_API_TOKEN env var - host: Koyeb API host URL + host: Koyeb API host URL. 
If not provided, will try to get from KOYEB_API_HOST env var (defaults to https://app.koyeb.com) Returns: Tuple of (AppsApi, ServicesApi, InstancesApi) instances @@ -42,7 +42,8 @@ def get_api_client( "API token is required. Set KOYEB_API_TOKEN environment variable or pass api_token parameter" ) - configuration = Configuration(host=host) + api_host = host or os.getenv("KOYEB_API_HOST", "https://app.koyeb.com") + configuration = Configuration(host=api_host) configuration.api_key["Bearer"] = token configuration.api_key_prefix["Bearer"] = "Bearer" From 97afcf4bc53b8c947fbea2b2db230d66144cd414 Mon Sep 17 00:00:00 2001 From: Bastien Chatelard Date: Mon, 3 Nov 2025 16:16:52 +0100 Subject: [PATCH 03/47] Set default continent to EU --- koyeb/sandbox/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/koyeb/sandbox/utils.py b/koyeb/sandbox/utils.py index 3ce506f4..e0eb7c7c 100644 --- a/koyeb/sandbox/utils.py +++ b/koyeb/sandbox/utils.py @@ -109,7 +109,7 @@ def create_deployment_definition( DeploymentDefinition object """ if regions is None: - regions = ["na"] + regions = ["eu"] deployment_type = ( DeploymentDefinitionType.WEB if ports else DeploymentDefinitionType.WORKER From ff8e97a4f25919c421a67c6e96e4d2494888c425 Mon Sep 17 00:00:00 2001 From: Bastien Chatelard Date: Mon, 3 Nov 2025 16:35:37 +0100 Subject: [PATCH 04/47] Split module between async and sync sandbox --- koyeb/__init__.py | 4 +- koyeb/sandbox/__init__.py | 3 +- koyeb/sandbox/sandbox.py | 203 +++++++++++++++++++++++++++++--------- 3 files changed, 159 insertions(+), 51 deletions(-) diff --git a/koyeb/__init__.py b/koyeb/__init__.py index 1e21041c..ee580dbd 100644 --- a/koyeb/__init__.py +++ b/koyeb/__init__.py @@ -3,6 +3,6 @@ __version__ = "1.0.3" # Make Sandbox available at package level -from .sandbox import Sandbox +from .sandbox import Sandbox, AsyncSandbox -__all__ = ["Sandbox"] +__all__ = ["Sandbox", "AsyncSandbox"] diff --git a/koyeb/sandbox/__init__.py b/koyeb/sandbox/__init__.py 
index c0057c98..b69b02c4 100644 --- a/koyeb/sandbox/__init__.py +++ b/koyeb/sandbox/__init__.py @@ -10,7 +10,7 @@ from .exec import CommandResult, CommandStatus, SandboxCommandError, SandboxExecutor from .filesystem import FileInfo, SandboxFilesystem -from .sandbox import Sandbox +from .sandbox import Sandbox, AsyncSandbox from .utils import SandboxError __all__ = [ @@ -23,4 +23,5 @@ "CommandResult", "CommandStatus", "SandboxCommandError", + "AsyncSandbox", ] diff --git a/koyeb/sandbox/sandbox.py b/koyeb/sandbox/sandbox.py index 6e62e7b4..cb2a35f3 100644 --- a/koyeb/sandbox/sandbox.py +++ b/koyeb/sandbox/sandbox.py @@ -1,7 +1,7 @@ # coding: utf-8 """ -Async Koyeb Sandbox - Python SDK for creating and managing Koyeb sandboxes asynchronously +Koyeb Sandbox - Python SDK for creating and managing Koyeb sandboxes """ import asyncio @@ -22,8 +22,8 @@ class Sandbox: """ - Async sandbox for running code on Koyeb infrastructure. - Provides async creation and deletion functionality with proper health polling. + Synchronous sandbox for running code on Koyeb infrastructure. + Provides creation and deletion functionality with proper health polling. """ def __init__( @@ -44,7 +44,7 @@ def __init__( self._created_at = time.time() @classmethod - async def create( + def create( cls, image: str = "docker.io/library/ubuntu:latest", name: str = "quick-sandbox", @@ -57,7 +57,7 @@ async def create( timeout: int = 300, ) -> "Sandbox": """ - Create a new sandbox instance with async support. + Create a new sandbox instance. Args: image: Docker image to use (default: ubuntu:latest) @@ -82,23 +82,19 @@ async def create( "API token is required. 
Set KOYEB_API_TOKEN environment variable or pass api_token parameter" ) - loop = asyncio.get_running_loop() - sandbox = await loop.run_in_executor( - None, - lambda: cls._create_sync( - name=name, - image=image, - instance_type=instance_type, - ports=ports, - env=env, - regions=regions, - api_token=api_token, - timeout=timeout, - ), + sandbox = cls._create_sync( + name=name, + image=image, + instance_type=instance_type, + ports=ports, + env=env, + regions=regions, + api_token=api_token, + timeout=timeout, ) if wait_ready: - await sandbox.wait_ready(timeout=timeout) + sandbox.wait_ready(timeout=timeout) return sandbox @@ -114,7 +110,10 @@ def _create_sync( api_token: Optional[str] = None, timeout: int = 300, ) -> "Sandbox": - """Synchronous creation method""" + """ + Synchronous creation method that returns creation parameters. + Subclasses can override to return their own type. + """ apps_api, services_api, _ = get_api_client(api_token) app_name = f"sandbox-app-{name}-{int(time.time())}" @@ -169,7 +168,7 @@ def _create_sync( f"No instances found in deployment after {max_wait} seconds" ) - sandbox = cls( + return cls( sandbox_id=name, app_id=app_id, service_id=service_id, @@ -177,11 +176,10 @@ def _create_sync( name=name, api_token=api_token, ) - return sandbox - async def wait_ready(self, timeout: int = 60, poll_interval: float = 2.0) -> bool: + def wait_ready(self, timeout: int = 60, poll_interval: float = 2.0) -> bool: """ - Wait for sandbox to become ready with proper async polling. + Wait for sandbox to become ready with proper polling. 
Args: timeout: Maximum time to wait in seconds @@ -193,45 +191,31 @@ async def wait_ready(self, timeout: int = 60, poll_interval: float = 2.0) -> boo start_time = time.time() while time.time() - start_time < timeout: - loop = asyncio.get_running_loop() - is_healthy = await loop.run_in_executor( - None, is_sandbox_healthy, self.instance_id, self.api_token - ) + is_healthy = is_sandbox_healthy(self.instance_id, self.api_token) if is_healthy: return True - await asyncio.sleep(poll_interval) + time.sleep(poll_interval) return False - async def delete(self) -> None: - """Delete the sandbox instance asynchronously.""" - loop = asyncio.get_running_loop() - await loop.run_in_executor(None, self._delete_sync) - - def _delete_sync(self) -> None: - """Synchronous deletion method""" + def delete(self) -> None: + """Delete the sandbox instance.""" apps_api, services_api, _ = get_api_client(self.api_token) services_api.delete_service(self.service_id) apps_api.delete_app(self.app_id) - async def status(self) -> str: - """Get current sandbox status asynchronously""" + def status(self) -> str: + """Get current sandbox status""" from .utils import get_sandbox_status - loop = asyncio.get_running_loop() - status = await loop.run_in_executor( - None, get_sandbox_status, self.instance_id, self.api_token - ) + status = get_sandbox_status(self.instance_id, self.api_token) return status.value - async def is_healthy(self) -> bool: - """Check if sandbox is healthy and ready for operations asynchronously""" - loop = asyncio.get_running_loop() - return await loop.run_in_executor( - None, is_sandbox_healthy, self.instance_id, self.api_token - ) + def is_healthy(self) -> bool: + """Check if sandbox is healthy and ready for operations""" + return is_sandbox_healthy(self.instance_id, self.api_token) @property def filesystem(self): @@ -246,3 +230,126 @@ def exec(self): from .exec import SandboxExecutor return SandboxExecutor(self) + + +class AsyncSandbox(Sandbox): + """ + Async sandbox for running 
code on Koyeb infrastructure. + Inherits from Sandbox and provides async wrappers for all operations. + """ + + @classmethod + async def create( + cls, + image: str = "docker.io/library/ubuntu:latest", + name: str = "quick-sandbox", + wait_ready: bool = True, + instance_type: str = "nano", + ports: Optional[List[DeploymentPort]] = None, + env: Optional[Dict[str, str]] = None, + regions: Optional[List[str]] = None, + api_token: Optional[str] = None, + timeout: int = 300, + ) -> "AsyncSandbox": + """ + Create a new sandbox instance with async support. + + Args: + image: Docker image to use (default: ubuntu:latest) + name: Name of the sandbox + wait_ready: Wait for sandbox to be ready (default: True) + instance_type: Instance type (default: nano) + ports: List of ports to expose + env: Environment variables + regions: List of regions to deploy to (default: ["na"]) + api_token: Koyeb API token (if None, will try to get from KOYEB_API_TOKEN env var) + timeout: Timeout for sandbox creation in seconds + + Returns: + AsyncSandbox: A new AsyncSandbox instance + """ + if api_token is None: + import os + + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + raise ValueError( + "API token is required. 
Set KOYEB_API_TOKEN environment variable or pass api_token parameter" + ) + + loop = asyncio.get_running_loop() + sync_result = await loop.run_in_executor( + None, + lambda: Sandbox._create_sync( + name=name, + image=image, + instance_type=instance_type, + ports=ports, + env=env, + regions=regions, + api_token=api_token, + timeout=timeout, + ), + ) + + # Convert Sandbox instance to AsyncSandbox instance + sandbox = cls( + sandbox_id=sync_result.sandbox_id, + app_id=sync_result.app_id, + service_id=sync_result.service_id, + instance_id=sync_result.instance_id, + name=sync_result.name, + api_token=sync_result.api_token, + ) + sandbox._created_at = sync_result._created_at + + if wait_ready: + await sandbox.wait_ready(timeout=timeout) + + return sandbox + + async def wait_ready(self, timeout: int = 60, poll_interval: float = 2.0) -> bool: + """ + Wait for sandbox to become ready with proper async polling. + + Args: + timeout: Maximum time to wait in seconds + poll_interval: Time between health checks in seconds + + Returns: + bool: True if sandbox became ready, False if timeout + """ + start_time = time.time() + + while time.time() - start_time < timeout: + loop = asyncio.get_running_loop() + is_healthy = await loop.run_in_executor( + None, super().is_healthy + ) + + if is_healthy: + return True + + await asyncio.sleep(poll_interval) + + return False + + async def delete(self) -> None: + """Delete the sandbox instance asynchronously.""" + loop = asyncio.get_running_loop() + await loop.run_in_executor(None, super().delete) + + async def status(self) -> str: + """Get current sandbox status asynchronously""" + loop = asyncio.get_running_loop() + status_value = await loop.run_in_executor( + None, super().status + ) + return status_value + + async def is_healthy(self) -> bool: + """Check if sandbox is healthy and ready for operations asynchronously""" + loop = asyncio.get_running_loop() + return await loop.run_in_executor( + None, super().is_healthy + ) From 
9c46985c85314f84a1b8a34e3d93c91accaa9c5a Mon Sep 17 00:00:00 2001 From: Bastien Chatelard Date: Mon, 3 Nov 2025 17:50:54 +0100 Subject: [PATCH 05/47] Update default Sandbox Executor to be sync and add async --- koyeb/sandbox/__init__.py | 11 +++++-- koyeb/sandbox/exec.py | 68 ++++++++++++++++++++++++++++++++++++--- koyeb/sandbox/sandbox.py | 7 ++++ 3 files changed, 80 insertions(+), 6 deletions(-) diff --git a/koyeb/sandbox/__init__.py b/koyeb/sandbox/__init__.py index b69b02c4..eb9d99df 100644 --- a/koyeb/sandbox/__init__.py +++ b/koyeb/sandbox/__init__.py @@ -8,20 +8,27 @@ from koyeb.api.models.instance_status import InstanceStatus as SandboxStatus -from .exec import CommandResult, CommandStatus, SandboxCommandError, SandboxExecutor +from .exec import ( + AsyncSandboxExecutor, + CommandResult, + CommandStatus, + SandboxCommandError, + SandboxExecutor, +) from .filesystem import FileInfo, SandboxFilesystem from .sandbox import Sandbox, AsyncSandbox from .utils import SandboxError __all__ = [ "Sandbox", + "AsyncSandbox", "SandboxFilesystem", "SandboxExecutor", + "AsyncSandboxExecutor", "FileInfo", "SandboxStatus", "SandboxError", "CommandResult", "CommandStatus", "SandboxCommandError", - "AsyncSandbox", ] diff --git a/koyeb/sandbox/exec.py b/koyeb/sandbox/exec.py index e53c5f82..fc77ce82 100644 --- a/koyeb/sandbox/exec.py +++ b/koyeb/sandbox/exec.py @@ -64,13 +64,73 @@ class SandboxCommandError(SandboxError): class SandboxExecutor: """ - Command execution interface for Koyeb Sandbox instances. + Synchronous command execution interface for Koyeb Sandbox instances. Bound to a specific sandbox instance. + + For async usage, use AsyncSandboxExecutor instead. 
""" def __init__(self, sandbox): self.sandbox = sandbox + def __call__( + self, + command: str, + cwd: Optional[str] = None, + env: Optional[Dict[str, str]] = None, + timeout: int = 30, + on_stdout: Optional[Callable[[str], None]] = None, + on_stderr: Optional[Callable[[str], None]] = None, + ) -> CommandResult: + """ + Execute a command in a shell synchronously. Supports streaming output via callbacks. + + Args: + command: Command to execute as a string (e.g., "python -c 'print(2+2)'") + cwd: Working directory for the command + env: Environment variables for the command + timeout: Command timeout in seconds + on_stdout: Optional callback for streaming stdout chunks + on_stderr: Optional callback for streaming stderr chunks + + Returns: + CommandResult: Result of the command execution + + Example: + ```python + # Synchronous execution + result = sandbox.exec("echo hello") + + # With streaming callbacks + result = sandbox.exec( + "echo hello; sleep 1; echo world", + on_stdout=lambda data: print(f"OUT: {data}"), + on_stderr=lambda data: print(f"ERR: {data}"), + ) + ``` + """ + return asyncio.run( + _exec_async( + instance_id=self.sandbox.instance_id, + command=command, + cwd=cwd, + env=env, + timeout=timeout, + api_token=self.sandbox.api_token, + on_stdout=on_stdout, + on_stderr=on_stderr, + ) + ) + + +class AsyncSandboxExecutor(SandboxExecutor): + """ + Async command execution interface for Koyeb Sandbox instances. + Bound to a specific sandbox instance. + + Inherits from SandboxExecutor and provides async command execution. + """ + async def __call__( self, command: str, @@ -81,7 +141,7 @@ async def __call__( on_stderr: Optional[Callable[[str], None]] = None, ) -> CommandResult: """ - Execute a command in a shell. Supports streaming output via callbacks. + Execute a command in a shell asynchronously. Supports streaming output via callbacks. 
Args: command: Command to execute as a string (e.g., "python -c 'print(2+2)'") @@ -96,10 +156,10 @@ async def __call__( Example: ```python - # Without streaming + # Async execution result = await sandbox.exec("echo hello") - # With streaming + # With streaming callbacks result = await sandbox.exec( "echo hello; sleep 1; echo world", on_stdout=lambda data: print(f"OUT: {data}"), diff --git a/koyeb/sandbox/sandbox.py b/koyeb/sandbox/sandbox.py index cb2a35f3..c7d14b83 100644 --- a/koyeb/sandbox/sandbox.py +++ b/koyeb/sandbox/sandbox.py @@ -353,3 +353,10 @@ async def is_healthy(self) -> bool: return await loop.run_in_executor( None, super().is_healthy ) + + @property + def exec(self): + """Get async command execution interface""" + from .exec import AsyncSandboxExecutor + + return AsyncSandboxExecutor(self) From b2d8f1e342727bea39f82d599809a8a430f00a1b Mon Sep 17 00:00:00 2001 From: Bastien Chatelard Date: Tue, 4 Nov 2025 09:24:39 +0100 Subject: [PATCH 06/47] Update filesystem to have a sync and async variant --- koyeb/sandbox/filesystem.py | 370 +++++++++++++++++++++++++++++++----- 1 file changed, 326 insertions(+), 44 deletions(-) diff --git a/koyeb/sandbox/filesystem.py b/koyeb/sandbox/filesystem.py index 4382d2a8..ecb10cdc 100644 --- a/koyeb/sandbox/filesystem.py +++ b/koyeb/sandbox/filesystem.py @@ -5,6 +5,7 @@ Using only the primitives available in the Koyeb API """ +import asyncio import base64 import os import shlex @@ -37,24 +38,32 @@ class FileInfo: class SandboxFilesystem: """ - Filesystem operations for Koyeb Sandbox instances - Using only the primitives available in the Koyeb API + Synchronous filesystem operations for Koyeb Sandbox instances. + Using only the primitives available in the Koyeb API. + + For async usage, use AsyncSandboxFilesystem instead. 
""" def __init__(self, sandbox): self.sandbox = sandbox - async def write_file( + def write_file( self, path: str, content: Union[str, bytes], encoding: str = "utf-8" ) -> None: """ - Write content to a file. + Write content to a file synchronously. Args: path: Absolute path to the file content: Content to write (string or bytes) encoding: File encoding (default: "utf-8"). Use "base64" for binary data. """ + asyncio.run(self._write_file_async(path, content, encoding)) + + async def _write_file_async( + self, path: str, content: Union[str, bytes], encoding: str = "utf-8" + ) -> None: + """Internal async implementation for write_file""" ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) escaped_path = shlex.quote(path) @@ -82,9 +91,9 @@ async def write_file( raise SandboxFilesystemError(f"Permission denied: {path}") raise SandboxFilesystemError(f"Failed to write file: {result.stderr}") - async def read_file(self, path: str, encoding: str = "utf-8") -> FileInfo: + def read_file(self, path: str, encoding: str = "utf-8") -> FileInfo: """ - Read a file from the sandbox. + Read a file from the sandbox synchronously. Args: path: Absolute path to the file @@ -93,6 +102,10 @@ async def read_file(self, path: str, encoding: str = "utf-8") -> FileInfo: Returns: FileInfo: Object with content and encoding """ + return asyncio.run(self._read_file_async(path, encoding)) + + async def _read_file_async(self, path: str, encoding: str = "utf-8") -> FileInfo: + """Internal async implementation for read_file""" ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) escaped_path = shlex.quote(path) @@ -119,14 +132,18 @@ async def read_file(self, path: str, encoding: str = "utf-8") -> FileInfo: return FileInfo(content=result.stdout.strip(), encoding=encoding) - async def mkdir(self, path: str, recursive: bool = False) -> None: + def mkdir(self, path: str, recursive: bool = False) -> None: """ - Create a directory. + Create a directory synchronously. 
Args: path: Absolute path to the directory recursive: Create parent directories if needed (default: False) """ + asyncio.run(self._mkdir_async(path, recursive)) + + async def _mkdir_async(self, path: str, recursive: bool = False) -> None: + """Internal async implementation for mkdir""" ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) if recursive: @@ -149,9 +166,9 @@ async def mkdir(self, path: str, recursive: bool = False) -> None: raise SandboxFilesystemError(f"Permission denied: {path}") raise SandboxFilesystemError(f"Failed to create directory: {result.stderr}") - async def list_dir(self, path: str = ".") -> List[str]: + def list_dir(self, path: str = ".") -> List[str]: """ - List contents of a directory. + List contents of a directory synchronously. Args: path: Path to the directory (default: current directory) @@ -159,6 +176,10 @@ async def list_dir(self, path: str = ".") -> List[str]: Returns: List[str]: Names of files and directories within the specified path. """ + return asyncio.run(self._list_dir_async(path)) + + async def _list_dir_async(self, path: str = ".") -> List[str]: + """Internal async implementation for list_dir""" ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) result = await _exec_async( @@ -176,13 +197,17 @@ async def list_dir(self, path: str = ".") -> List[str]: return [item for item in result.stdout.splitlines() if item] - async def delete_file(self, path: str) -> None: + def delete_file(self, path: str) -> None: """ - Delete a file. + Delete a file synchronously. 
Args: path: Absolute path to the file """ + asyncio.run(self._delete_file_async(path)) + + async def _delete_file_async(self, path: str) -> None: + """Internal async implementation for delete_file""" ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) result = await _exec_async( @@ -198,13 +223,17 @@ async def delete_file(self, path: str) -> None: raise SandboxFilesystemError(f"Permission denied: {path}") raise SandboxFilesystemError(f"Failed to delete file: {result.stderr}") - async def delete_dir(self, path: str) -> None: + def delete_dir(self, path: str) -> None: """ - Delete a directory. + Delete a directory synchronously. Args: path: Absolute path to the directory """ + asyncio.run(self._delete_dir_async(path)) + + async def _delete_dir_async(self, path: str) -> None: + """Internal async implementation for delete_dir""" ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) result = await _exec_async( @@ -222,14 +251,18 @@ async def delete_dir(self, path: str) -> None: raise SandboxFilesystemError(f"Permission denied: {path}") raise SandboxFilesystemError(f"Failed to delete directory: {result.stderr}") - async def rename_file(self, old_path: str, new_path: str) -> None: + def rename_file(self, old_path: str, new_path: str) -> None: """ - Rename a file. + Rename a file synchronously. 
Args: old_path: Current file path new_path: New file path """ + asyncio.run(self._rename_file_async(old_path, new_path)) + + async def _rename_file_async(self, old_path: str, new_path: str) -> None: + """Internal async implementation for rename_file""" ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) result = await _exec_async( @@ -245,14 +278,18 @@ async def rename_file(self, old_path: str, new_path: str) -> None: raise SandboxFilesystemError(f"Permission denied: {old_path}") raise SandboxFilesystemError(f"Failed to rename file: {result.stderr}") - async def move_file(self, source_path: str, destination_path: str) -> None: + def move_file(self, source_path: str, destination_path: str) -> None: """ - Move a file to a different directory. + Move a file to a different directory synchronously. Args: source_path: Current file path destination_path: Destination path """ + asyncio.run(self._move_file_async(source_path, destination_path)) + + async def _move_file_async(self, source_path: str, destination_path: str) -> None: + """Internal async implementation for move_file""" ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) result = await _exec_async( @@ -268,9 +305,9 @@ async def move_file(self, source_path: str, destination_path: str) -> None: raise SandboxFilesystemError(f"Permission denied: {source_path}") raise SandboxFilesystemError(f"Failed to move file: {result.stderr}") - async def write_files(self, files: List[Dict[str, str]]) -> None: + def write_files(self, files: List[Dict[str, str]]) -> None: """ - Write multiple files in a single operation. + Write multiple files in a single operation synchronously. Args: files: List of dictionaries, each with 'path', 'content', and optional 'encoding'. 
@@ -279,10 +316,14 @@ async def write_files(self, files: List[Dict[str, str]]) -> None: path = file_info["path"] content = file_info["content"] encoding = file_info.get("encoding", "utf-8") - await self.write_file(path, content, encoding) + self.write_file(path, content, encoding) - async def exists(self, path: str) -> bool: - """Check if file/directory exists""" + def exists(self, path: str) -> bool: + """Check if file/directory exists synchronously""" + return asyncio.run(self._exists_async(path)) + + async def _exists_async(self, path: str) -> bool: + """Internal async implementation for exists""" ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) result = await _exec_async( instance_id=self.sandbox.instance_id, @@ -291,8 +332,12 @@ async def exists(self, path: str) -> bool: ) return result.success - async def is_file(self, path: str) -> bool: - """Check if path is a file""" + def is_file(self, path: str) -> bool: + """Check if path is a file synchronously""" + return asyncio.run(self._is_file_async(path)) + + async def _is_file_async(self, path: str) -> bool: + """Internal async implementation for is_file""" ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) result = await _exec_async( instance_id=self.sandbox.instance_id, @@ -301,8 +346,12 @@ async def is_file(self, path: str) -> bool: ) return result.success - async def is_dir(self, path: str) -> bool: - """Check if path is a directory""" + def is_dir(self, path: str) -> bool: + """Check if path is a directory synchronously""" + return asyncio.run(self._is_dir_async(path)) + + async def _is_dir_async(self, path: str) -> bool: + """Internal async implementation for is_dir""" ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) result = await _exec_async( instance_id=self.sandbox.instance_id, @@ -311,14 +360,18 @@ async def is_dir(self, path: str) -> bool: ) return result.success - async def upload_file(self, local_path: str, remote_path: str) -> 
None: + def upload_file(self, local_path: str, remote_path: str) -> None: """ - Upload a local file to the sandbox. + Upload a local file to the sandbox synchronously. Args: local_path: Path to the local file remote_path: Destination path in the sandbox """ + asyncio.run(self._upload_file_async(local_path, remote_path)) + + async def _upload_file_async(self, local_path: str, remote_path: str) -> None: + """Internal async implementation for upload_file""" ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) if not os.path.exists(local_path): @@ -327,27 +380,31 @@ async def upload_file(self, local_path: str, remote_path: str) -> None: with open(local_path, "rb") as f: content = base64.b64encode(f.read()).decode("utf-8") - await self.write_file(remote_path, content, encoding="base64") + await self._write_file_async(remote_path, content, encoding="base64") - async def download_file(self, remote_path: str, local_path: str) -> None: + def download_file(self, remote_path: str, local_path: str) -> None: """ - Download a file from the sandbox to a local path. + Download a file from the sandbox to a local path synchronously. Args: remote_path: Path to the file in the sandbox local_path: Destination path on the local filesystem """ + asyncio.run(self._download_file_async(remote_path, local_path)) + + async def _download_file_async(self, remote_path: str, local_path: str) -> None: + """Internal async implementation for download_file""" ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) - file_info = await self.read_file(remote_path, encoding="base64") + file_info = await self._read_file_async(remote_path, encoding="base64") content = base64.b64decode(file_info.content) with open(local_path, "wb") as f: f.write(content) - async def ls(self, path: str = ".") -> List[str]: + def ls(self, path: str = ".") -> List[str]: """ - List directory contents. + List directory contents synchronously. 
Args: path: Path to list @@ -355,16 +412,20 @@ async def ls(self, path: str = ".") -> List[str]: Returns: List of file/directory names """ - return await self.list_dir(path) + return self.list_dir(path) - async def rm(self, path: str, recursive: bool = False) -> None: + def rm(self, path: str, recursive: bool = False) -> None: """ - Remove file or directory. + Remove file or directory synchronously. Args: path: Path to remove recursive: Remove recursively """ + asyncio.run(self._rm_async(path, recursive)) + + async def _rm_async(self, path: str, recursive: bool = False) -> None: + """Internal async implementation for rm""" ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) if recursive: @@ -387,7 +448,7 @@ async def rm(self, path: str, recursive: bool = False) -> None: def open(self, path: str, mode: str = "r") -> "SandboxFileIO": """ - Open a file in the sandbox. + Open a file in the sandbox synchronously. Args: path: Path to the file @@ -399,8 +460,181 @@ def open(self, path: str, mode: str = "r") -> "SandboxFileIO": return SandboxFileIO(self, path, mode) +class AsyncSandboxFilesystem(SandboxFilesystem): + """ + Async filesystem operations for Koyeb Sandbox instances. + Inherits from SandboxFilesystem and provides async methods. + """ + + async def write_file( + self, path: str, content: Union[str, bytes], encoding: str = "utf-8" + ) -> None: + """ + Write content to a file asynchronously. + + Args: + path: Absolute path to the file + content: Content to write (string or bytes) + encoding: File encoding (default: "utf-8"). Use "base64" for binary data. + """ + await self._write_file_async(path, content, encoding) + + async def read_file(self, path: str, encoding: str = "utf-8") -> FileInfo: + """ + Read a file from the sandbox asynchronously. + + Args: + path: Absolute path to the file + encoding: File encoding (default: "utf-8"). Use "base64" for binary data. 
+ + Returns: + FileInfo: Object with content and encoding + """ + return await self._read_file_async(path, encoding) + + async def mkdir(self, path: str, recursive: bool = False) -> None: + """ + Create a directory asynchronously. + + Args: + path: Absolute path to the directory + recursive: Create parent directories if needed (default: False) + """ + await self._mkdir_async(path, recursive) + + async def list_dir(self, path: str = ".") -> List[str]: + """ + List contents of a directory asynchronously. + + Args: + path: Path to the directory (default: current directory) + + Returns: + List[str]: Names of files and directories within the specified path. + """ + return await self._list_dir_async(path) + + async def delete_file(self, path: str) -> None: + """ + Delete a file asynchronously. + + Args: + path: Absolute path to the file + """ + await self._delete_file_async(path) + + async def delete_dir(self, path: str) -> None: + """ + Delete a directory asynchronously. + + Args: + path: Absolute path to the directory + """ + await self._delete_dir_async(path) + + async def rename_file(self, old_path: str, new_path: str) -> None: + """ + Rename a file asynchronously. + + Args: + old_path: Current file path + new_path: New file path + """ + await self._rename_file_async(old_path, new_path) + + async def move_file(self, source_path: str, destination_path: str) -> None: + """ + Move a file to a different directory asynchronously. + + Args: + source_path: Current file path + destination_path: Destination path + """ + await self._move_file_async(source_path, destination_path) + + async def write_files(self, files: List[Dict[str, str]]) -> None: + """ + Write multiple files in a single operation asynchronously. + + Args: + files: List of dictionaries, each with 'path', 'content', and optional 'encoding'. 
+ """ + for file_info in files: + path = file_info["path"] + content = file_info["content"] + encoding = file_info.get("encoding", "utf-8") + await self.write_file(path, content, encoding) + + async def exists(self, path: str) -> bool: + """Check if file/directory exists asynchronously""" + return await self._exists_async(path) + + async def is_file(self, path: str) -> bool: + """Check if path is a file asynchronously""" + return await self._is_file_async(path) + + async def is_dir(self, path: str) -> bool: + """Check if path is a directory asynchronously""" + return await self._is_dir_async(path) + + async def upload_file(self, local_path: str, remote_path: str) -> None: + """ + Upload a local file to the sandbox asynchronously. + + Args: + local_path: Path to the local file + remote_path: Destination path in the sandbox + """ + await self._upload_file_async(local_path, remote_path) + + async def download_file(self, remote_path: str, local_path: str) -> None: + """ + Download a file from the sandbox to a local path asynchronously. + + Args: + remote_path: Path to the file in the sandbox + local_path: Destination path on the local filesystem + """ + await self._download_file_async(remote_path, local_path) + + async def ls(self, path: str = ".") -> List[str]: + """ + List directory contents asynchronously. + + Args: + path: Path to list + + Returns: + List of file/directory names + """ + return await self.list_dir(path) + + async def rm(self, path: str, recursive: bool = False) -> None: + """ + Remove file or directory asynchronously. + + Args: + path: Path to remove + recursive: Remove recursively + """ + await self._rm_async(path, recursive) + + def open(self, path: str, mode: str = "r") -> "AsyncSandboxFileIO": + """ + Open a file in the sandbox asynchronously. + + Args: + path: Path to the file + mode: Open mode ('r', 'w', 'a', etc.) 
+ + Returns: + AsyncSandboxFileIO: Async file handle + """ + return AsyncSandboxFileIO(self, path, mode) + + class SandboxFileIO: - """File I/O handle for sandbox files""" + """Synchronous file I/O handle for sandbox files""" def __init__(self, filesystem: SandboxFilesystem, path: str, mode: str): self.filesystem = filesystem @@ -408,8 +642,56 @@ def __init__(self, filesystem: SandboxFilesystem, path: str, mode: str): self.mode = mode self._closed = False + def read(self) -> str: + """Read file content synchronously""" + if "r" not in self.mode: + raise ValueError("File not opened for reading") + + if self._closed: + raise ValueError("File is closed") + + file_info = self.filesystem.read_file(self.path) + return file_info.content + + def write(self, content: str) -> None: + """Write content to file synchronously""" + if "w" not in self.mode and "a" not in self.mode: + raise ValueError("File not opened for writing") + + if self._closed: + raise ValueError("File is closed") + + if "a" in self.mode: + try: + existing = self.filesystem.read_file(self.path) + content = existing.content + content + except FileNotFoundError: + pass + + self.filesystem.write_file(self.path, content) + + def close(self) -> None: + """Close the file""" + self._closed = True + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + + +class AsyncSandboxFileIO: + """Async file I/O handle for sandbox files""" + + def __init__(self, filesystem: AsyncSandboxFilesystem, path: str, mode: str): + self.filesystem = filesystem + self.path = path + self.mode = mode + self._closed = False + async def read(self) -> str: - """Read file content""" + """Read file content asynchronously""" if "r" not in self.mode: raise ValueError("File not opened for reading") @@ -420,7 +702,7 @@ async def read(self) -> str: return file_info.content async def write(self, content: str) -> None: - """Write content to file""" + """Write content to file asynchronously""" if "w" 
not in self.mode and "a" not in self.mode: raise ValueError("File not opened for writing") @@ -440,8 +722,8 @@ def close(self) -> None: """Close the file""" self._closed = True - def __enter__(self): + async def __aenter__(self): return self - def __exit__(self, exc_type, exc_val, exc_tb): + async def __aexit__(self, exc_type, exc_val, exc_tb): self.close() From 5b2864dca76efc599e1b71b0373a6b12c3fa63ab Mon Sep 17 00:00:00 2001 From: Bastien Chatelard Date: Tue, 4 Nov 2025 09:24:58 +0100 Subject: [PATCH 07/47] Update examples to be sync by default --- examples/01_create_sandbox.py | 15 ++++++----- examples/02_basic_commands.py | 15 ++++++----- examples/03_streaming_output.py | 17 ++++++------- examples/04_environment_variables.py | 13 +++++----- examples/05_working_directory.py | 19 +++++++------- examples/06_file_operations.py | 19 +++++++------- examples/07_directory_operations.py | 29 +++++++++++----------- examples/08_binary_files.py | 13 +++++----- examples/09_batch_operations.py | 17 ++++++------- examples/10_upload_download.py | 17 ++++++------- examples/11_file_manipulation.py | 37 ++++++++++++++-------------- 11 files changed, 100 insertions(+), 111 deletions(-) diff --git a/examples/01_create_sandbox.py b/examples/01_create_sandbox.py index bc4f7e35..2c33fced 100644 --- a/examples/01_create_sandbox.py +++ b/examples/01_create_sandbox.py @@ -1,13 +1,12 @@ #!/usr/bin/env python3 """Create and manage a sandbox""" -import asyncio import os from koyeb import Sandbox -async def main(): +def main(): api_token = os.getenv("KOYEB_API_TOKEN") if not api_token: print("Error: KOYEB_API_TOKEN not set") @@ -15,7 +14,7 @@ async def main(): sandbox = None try: - sandbox = await Sandbox.create( + sandbox = Sandbox.create( image="python:3.11", name="example-sandbox", wait_ready=True, @@ -23,20 +22,20 @@ async def main(): ) # Check status - status = await sandbox.status() - is_healthy = await sandbox.is_healthy() + status = sandbox.status() + is_healthy = 
sandbox.is_healthy() print(f"Status: {status}, Healthy: {is_healthy}") # Test command - result = await sandbox.exec("echo 'Sandbox is ready!'") + result = sandbox.exec("echo 'Sandbox is ready!'") print(result.stdout.strip()) except Exception as e: print(f"Error: {e}") finally: if sandbox: - await sandbox.delete() + sandbox.delete() if __name__ == "__main__": - asyncio.run(main()) + main() diff --git a/examples/02_basic_commands.py b/examples/02_basic_commands.py index 9ff12565..65e70391 100644 --- a/examples/02_basic_commands.py +++ b/examples/02_basic_commands.py @@ -1,13 +1,12 @@ #!/usr/bin/env python3 """Basic command execution""" -import asyncio import os from koyeb import Sandbox -async def main(): +def main(): api_token = os.getenv("KOYEB_API_TOKEN") if not api_token: print("Error: KOYEB_API_TOKEN not set") @@ -15,7 +14,7 @@ async def main(): sandbox = None try: - sandbox = await Sandbox.create( + sandbox = Sandbox.create( image="python:3.11", name="basic-commands", wait_ready=True, @@ -23,15 +22,15 @@ async def main(): ) # Simple command - result = await sandbox.exec("echo 'Hello World'") + result = sandbox.exec("echo 'Hello World'") print(result.stdout.strip()) # Python command - result = await sandbox.exec("python3 -c 'print(2 + 2)'") + result = sandbox.exec("python3 -c 'print(2 + 2)'") print(result.stdout.strip()) # Multi-line Python script - result = await sandbox.exec( + result = sandbox.exec( '''python3 -c " import sys print(f'Python version: {sys.version.split()[0]}') @@ -44,8 +43,8 @@ async def main(): print(f"Error: {e}") finally: if sandbox: - await sandbox.delete() + sandbox.delete() if __name__ == "__main__": - asyncio.run(main()) + main() diff --git a/examples/03_streaming_output.py b/examples/03_streaming_output.py index 12c31b33..868763b3 100644 --- a/examples/03_streaming_output.py +++ b/examples/03_streaming_output.py @@ -1,13 +1,12 @@ #!/usr/bin/env python3 """Streaming command output""" -import asyncio import os from koyeb import Sandbox 
-async def main(): +def main(): api_token = os.getenv("KOYEB_API_TOKEN") if not api_token: print("Error: KOYEB_API_TOKEN not set") @@ -15,7 +14,7 @@ async def main(): sandbox = None try: - sandbox = await Sandbox.create( + sandbox = Sandbox.create( image="python:3.11", name="streaming", wait_ready=True, @@ -23,7 +22,7 @@ async def main(): ) # Stream output in real-time - result = await sandbox.exec( + result = sandbox.exec( '''python3 -c " import time for i in range(5): @@ -36,13 +35,13 @@ async def main(): print(f"\nExit code: {result.exit_code}") # Stream a script - await sandbox.filesystem.write_file( + sandbox.filesystem.write_file( "/tmp/counter.py", "#!/usr/bin/env python3\nimport time\nfor i in range(1, 6):\n print(f'Count: {i}')\n time.sleep(0.3)\nprint('Done!')\n", ) - await sandbox.exec("chmod +x /tmp/counter.py") + sandbox.exec("chmod +x /tmp/counter.py") - result = await sandbox.exec( + result = sandbox.exec( "python3 /tmp/counter.py", on_stdout=lambda data: print(data.strip()), ) @@ -51,8 +50,8 @@ async def main(): print(f"Error: {e}") finally: if sandbox: - await sandbox.delete() + sandbox.delete() if __name__ == "__main__": - asyncio.run(main()) + main() diff --git a/examples/04_environment_variables.py b/examples/04_environment_variables.py index bc8bb431..36963d00 100644 --- a/examples/04_environment_variables.py +++ b/examples/04_environment_variables.py @@ -1,13 +1,12 @@ #!/usr/bin/env python3 """Environment variables in commands""" -import asyncio import os from koyeb import Sandbox -async def main(): +def main(): api_token = os.getenv("KOYEB_API_TOKEN") if not api_token: print("Error: KOYEB_API_TOKEN not set") @@ -15,7 +14,7 @@ async def main(): sandbox = None try: - sandbox = await Sandbox.create( + sandbox = Sandbox.create( image="python:3.11", name="env-vars", wait_ready=True, @@ -24,11 +23,11 @@ async def main(): # Set environment variables env_vars = {"MY_VAR": "Hello", "DEBUG": "true"} - result = await sandbox.exec("env | grep MY_VAR", 
env=env_vars) + result = sandbox.exec("env | grep MY_VAR", env=env_vars) print(result.stdout.strip()) # Use in Python command - result = await sandbox.exec( + result = sandbox.exec( 'python3 -c "import os; print(os.getenv(\'MY_VAR\'))"', env={"MY_VAR": "Hello from Python!"}, ) @@ -38,8 +37,8 @@ async def main(): print(f"Error: {e}") finally: if sandbox: - await sandbox.delete() + sandbox.delete() if __name__ == "__main__": - asyncio.run(main()) + main() diff --git a/examples/05_working_directory.py b/examples/05_working_directory.py index bbfceb5f..30c3583d 100644 --- a/examples/05_working_directory.py +++ b/examples/05_working_directory.py @@ -1,13 +1,12 @@ #!/usr/bin/env python3 """Working directory for commands""" -import asyncio import os from koyeb import Sandbox -async def main(): +def main(): api_token = os.getenv("KOYEB_API_TOKEN") if not api_token: print("Error: KOYEB_API_TOKEN not set") @@ -15,7 +14,7 @@ async def main(): sandbox = None try: - sandbox = await Sandbox.create( + sandbox = Sandbox.create( image="python:3.11", name="working-dir", wait_ready=True, @@ -23,27 +22,27 @@ async def main(): ) # Setup: create directory structure - await sandbox.exec("mkdir -p /tmp/my_project/src") - await sandbox.exec("echo 'print(\\\"hello\\\")' > /tmp/my_project/src/main.py") + sandbox.exec("mkdir -p /tmp/my_project/src") + sandbox.exec("echo 'print(\\\"hello\\\")' > /tmp/my_project/src/main.py") # Run command in specific directory - result = await sandbox.exec("pwd", cwd="/tmp/my_project") + result = sandbox.exec("pwd", cwd="/tmp/my_project") print(result.stdout.strip()) # List files in working directory - result = await sandbox.exec("ls -la", cwd="/tmp/my_project") + result = sandbox.exec("ls -la", cwd="/tmp/my_project") print(result.stdout.strip()) # Use relative paths - result = await sandbox.exec("cat src/main.py", cwd="/tmp/my_project") + result = sandbox.exec("cat src/main.py", cwd="/tmp/my_project") print(result.stdout.strip()) except Exception as e: 
print(f"Error: {e}") finally: if sandbox: - await sandbox.delete() + sandbox.delete() if __name__ == "__main__": - asyncio.run(main()) + main() diff --git a/examples/06_file_operations.py b/examples/06_file_operations.py index 004875d0..3dffceae 100644 --- a/examples/06_file_operations.py +++ b/examples/06_file_operations.py @@ -1,13 +1,12 @@ #!/usr/bin/env python3 """Basic file operations""" -import asyncio import os from koyeb import Sandbox -async def main(): +def main(): api_token = os.getenv("KOYEB_API_TOKEN") if not api_token: print("Error: KOYEB_API_TOKEN not set") @@ -15,7 +14,7 @@ async def main(): sandbox = None try: - sandbox = await Sandbox.create( + sandbox = Sandbox.create( image="python:3.11", name="file-ops", wait_ready=True, @@ -26,25 +25,25 @@ async def main(): # Write file content = "Hello, Koyeb Sandbox!\nThis is a test file." - await fs.write_file("/tmp/hello.txt", content) + fs.write_file("/tmp/hello.txt", content) # Read file - file_info = await fs.read_file("/tmp/hello.txt") + file_info = fs.read_file("/tmp/hello.txt") print(file_info.content) # Write Python script python_code = "#!/usr/bin/env python3\nprint('Hello from Python!')\n" - await fs.write_file("/tmp/script.py", python_code) - await sandbox.exec("chmod +x /tmp/script.py") - result = await sandbox.exec("/tmp/script.py") + fs.write_file("/tmp/script.py", python_code) + sandbox.exec("chmod +x /tmp/script.py") + result = sandbox.exec("/tmp/script.py") print(result.stdout.strip()) except Exception as e: print(f"Error: {e}") finally: if sandbox: - await sandbox.delete() + sandbox.delete() if __name__ == "__main__": - asyncio.run(main()) + main() diff --git a/examples/07_directory_operations.py b/examples/07_directory_operations.py index 9d2d14a6..38da4ddf 100644 --- a/examples/07_directory_operations.py +++ b/examples/07_directory_operations.py @@ -1,13 +1,12 @@ #!/usr/bin/env python3 """Directory operations""" -import asyncio import os from koyeb import Sandbox -async def main(): +def 
main(): api_token = os.getenv("KOYEB_API_TOKEN") if not api_token: print("Error: KOYEB_API_TOKEN not set") @@ -15,7 +14,7 @@ async def main(): sandbox = None try: - sandbox = await Sandbox.create( + sandbox = Sandbox.create( image="python:3.11", name="directory-ops", wait_ready=True, @@ -25,33 +24,33 @@ async def main(): fs = sandbox.filesystem # Create directory - await fs.mkdir("/tmp/my_project") + fs.mkdir("/tmp/my_project") # Create nested directories - await fs.mkdir("/tmp/my_project/src/utils", recursive=True) + fs.mkdir("/tmp/my_project/src/utils", recursive=True) # List directory - contents = await fs.list_dir("/tmp/my_project") + contents = fs.list_dir("/tmp/my_project") print(f"Contents: {contents}") # Create project structure - await fs.mkdir("/tmp/my_project/src", recursive=True) - await fs.mkdir("/tmp/my_project/tests", recursive=True) - await fs.write_file("/tmp/my_project/src/main.py", "print('Hello')") - await fs.write_file("/tmp/my_project/README.md", "# My Project") + fs.mkdir("/tmp/my_project/src", recursive=True) + fs.mkdir("/tmp/my_project/tests", recursive=True) + fs.write_file("/tmp/my_project/src/main.py", "print('Hello')") + fs.write_file("/tmp/my_project/README.md", "# My Project") # Check if path exists - exists = await fs.exists("/tmp/my_project") - is_dir = await fs.is_dir("/tmp/my_project") - is_file = await fs.is_file("/tmp/my_project/src/main.py") + exists = fs.exists("/tmp/my_project") + is_dir = fs.is_dir("/tmp/my_project") + is_file = fs.is_file("/tmp/my_project/src/main.py") print(f"Exists: {exists}, Is dir: {is_dir}, Is file: {is_file}") except Exception as e: print(f"Error: {e}") finally: if sandbox: - await sandbox.delete() + sandbox.delete() if __name__ == "__main__": - asyncio.run(main()) + main() diff --git a/examples/08_binary_files.py b/examples/08_binary_files.py index 10eb5964..06d97107 100644 --- a/examples/08_binary_files.py +++ b/examples/08_binary_files.py @@ -1,14 +1,13 @@ #!/usr/bin/env python3 """Binary file 
operations""" -import asyncio import base64 import os from koyeb import Sandbox -async def main(): +def main(): api_token = os.getenv("KOYEB_API_TOKEN") if not api_token: print("Error: KOYEB_API_TOKEN not set") @@ -16,7 +15,7 @@ async def main(): sandbox = None try: - sandbox = await Sandbox.create( + sandbox = Sandbox.create( image="python:3.11", name="binary-files", wait_ready=True, @@ -28,10 +27,10 @@ async def main(): # Write binary data binary_data = b"Binary data: \x00\x01\x02\x03\xff\xfe\xfd" base64_data = base64.b64encode(binary_data).decode("utf-8") - await fs.write_file("/tmp/binary.bin", base64_data, encoding="base64") + fs.write_file("/tmp/binary.bin", base64_data, encoding="base64") # Read binary data - file_info = await fs.read_file("/tmp/binary.bin", encoding="base64") + file_info = fs.read_file("/tmp/binary.bin", encoding="base64") decoded = base64.b64decode(file_info.content) print(f"Original: {binary_data}") print(f"Decoded: {decoded}") @@ -41,8 +40,8 @@ async def main(): print(f"Error: {e}") finally: if sandbox: - await sandbox.delete() + sandbox.delete() if __name__ == "__main__": - asyncio.run(main()) + main() diff --git a/examples/09_batch_operations.py b/examples/09_batch_operations.py index 8e43f42c..c6d4c41d 100644 --- a/examples/09_batch_operations.py +++ b/examples/09_batch_operations.py @@ -1,13 +1,12 @@ #!/usr/bin/env python3 """Batch file operations""" -import asyncio import os from koyeb import Sandbox -async def main(): +def main(): api_token = os.getenv("KOYEB_API_TOKEN") if not api_token: print("Error: KOYEB_API_TOKEN not set") @@ -15,7 +14,7 @@ async def main(): sandbox = None try: - sandbox = await Sandbox.create( + sandbox = Sandbox.create( image="python:3.11", name="batch-ops", wait_ready=True, @@ -31,11 +30,11 @@ async def main(): {"path": "/tmp/file3.txt", "content": "Content of file 3"}, ] - await fs.write_files(files_to_create) + fs.write_files(files_to_create) print("Created 3 files") # Verify - created_files = await 
fs.ls("/tmp") + created_files = fs.ls("/tmp") batch_files = [f for f in created_files if f.startswith("file")] print(f"Files: {batch_files}") @@ -46,16 +45,16 @@ async def main(): {"path": "/tmp/project/README.md", "content": "# My Project"}, ] - await fs.mkdir("/tmp/project", recursive=True) - await fs.write_files(project_files) + fs.mkdir("/tmp/project", recursive=True) + fs.write_files(project_files) print("Created project structure") except Exception as e: print(f"Error: {e}") finally: if sandbox: - await sandbox.delete() + sandbox.delete() if __name__ == "__main__": - asyncio.run(main()) + main() diff --git a/examples/10_upload_download.py b/examples/10_upload_download.py index f7d57553..beabbd50 100644 --- a/examples/10_upload_download.py +++ b/examples/10_upload_download.py @@ -1,14 +1,13 @@ #!/usr/bin/env python3 """Upload and download files""" -import asyncio import os import tempfile from koyeb import Sandbox -async def main(): +def main(): api_token = os.getenv("KOYEB_API_TOKEN") if not api_token: print("Error: KOYEB_API_TOKEN not set") @@ -16,7 +15,7 @@ async def main(): sandbox = None try: - sandbox = await Sandbox.create( + sandbox = Sandbox.create( image="python:3.11", name="upload-download", wait_ready=True, @@ -32,14 +31,14 @@ async def main(): local_file = f.name try: - await fs.upload_file(local_file, "/tmp/uploaded_file.txt") - uploaded_info = await fs.read_file("/tmp/uploaded_file.txt") + fs.upload_file(local_file, "/tmp/uploaded_file.txt") + uploaded_info = fs.read_file("/tmp/uploaded_file.txt") print(uploaded_info.content) finally: os.unlink(local_file) # Download file from sandbox - await fs.write_file( + fs.write_file( "/tmp/download_source.txt", "Download test content\nMultiple lines" ) @@ -47,7 +46,7 @@ async def main(): download_path = f.name try: - await fs.download_file("/tmp/download_source.txt", download_path) + fs.download_file("/tmp/download_source.txt", download_path) with open(download_path, "r") as f: print(f.read()) finally: @@ 
-57,8 +56,8 @@ async def main(): print(f"Error: {e}") finally: if sandbox: - await sandbox.delete() + sandbox.delete() if __name__ == "__main__": - asyncio.run(main()) + main() diff --git a/examples/11_file_manipulation.py b/examples/11_file_manipulation.py index 8177a3ab..4f3db43f 100644 --- a/examples/11_file_manipulation.py +++ b/examples/11_file_manipulation.py @@ -1,13 +1,12 @@ #!/usr/bin/env python3 """File manipulation operations""" -import asyncio import os from koyeb import Sandbox -async def main(): +def main(): api_token = os.getenv("KOYEB_API_TOKEN") if not api_token: print("Error: KOYEB_API_TOKEN not set") @@ -15,7 +14,7 @@ async def main(): sandbox = None try: - sandbox = await Sandbox.create( + sandbox = Sandbox.create( image="python:3.11", name="file-manip", wait_ready=True, @@ -25,37 +24,37 @@ async def main(): fs = sandbox.filesystem # Setup - await fs.write_file("/tmp/file1.txt", "Content of file 1") - await fs.write_file("/tmp/file2.txt", "Content of file 2") - await fs.mkdir("/tmp/test_dir", recursive=True) + fs.write_file("/tmp/file1.txt", "Content of file 1") + fs.write_file("/tmp/file2.txt", "Content of file 2") + fs.mkdir("/tmp/test_dir", recursive=True) # Rename file - await fs.rename_file("/tmp/file1.txt", "/tmp/renamed_file.txt") - print(f"Renamed: {await fs.exists('/tmp/renamed_file.txt')}") + fs.rename_file("/tmp/file1.txt", "/tmp/renamed_file.txt") + print(f"Renamed: {fs.exists('/tmp/renamed_file.txt')}") # Move file - await fs.move_file("/tmp/file2.txt", "/tmp/test_dir/moved_file.txt") - print(f"Moved: {await fs.exists('/tmp/test_dir/moved_file.txt')}") + fs.move_file("/tmp/file2.txt", "/tmp/test_dir/moved_file.txt") + print(f"Moved: {fs.exists('/tmp/test_dir/moved_file.txt')}") # Copy file (read + write) - original_content = await fs.read_file("/tmp/renamed_file.txt") - await fs.write_file("/tmp/test_dir/copied_file.txt", original_content.content) - print(f"Copied: {await fs.exists('/tmp/test_dir/copied_file.txt')}") + 
original_content = fs.read_file("/tmp/renamed_file.txt") + fs.write_file("/tmp/test_dir/copied_file.txt", original_content.content) + print(f"Copied: {fs.exists('/tmp/test_dir/copied_file.txt')}") # Delete file - await fs.rm("/tmp/renamed_file.txt") - print(f"Deleted: {not await fs.exists('/tmp/renamed_file.txt')}") + fs.rm("/tmp/renamed_file.txt") + print(f"Deleted: {not fs.exists('/tmp/renamed_file.txt')}") # Delete directory - await fs.rm("/tmp/test_dir", recursive=True) - print(f"Directory deleted: {not await fs.exists('/tmp/test_dir')}") + fs.rm("/tmp/test_dir", recursive=True) + print(f"Directory deleted: {not fs.exists('/tmp/test_dir')}") except Exception as e: print(f"Error: {e}") finally: if sandbox: - await sandbox.delete() + sandbox.delete() if __name__ == "__main__": - asyncio.run(main()) + main() From 1164d58819a15daa5f720d1e81290e3b2bf241f6 Mon Sep 17 00:00:00 2001 From: Bastien Chatelard Date: Tue, 4 Nov 2025 09:31:03 +0100 Subject: [PATCH 08/47] Update examples --- examples/02_create_sandbox_with_timing.py | 196 ++++++++++++++++++ ...basic_commands.py => 03_basic_commands.py} | 0 ...aming_output.py => 04_streaming_output.py} | 0 ...riables.py => 05_environment_variables.py} | 0 ...g_directory.py => 06_working_directory.py} | 0 ...le_operations.py => 07_file_operations.py} | 0 ...erations.py => 08_directory_operations.py} | 0 ...{08_binary_files.py => 09_binary_files.py} | 0 ...h_operations.py => 10_batch_operations.py} | 0 ...load_download.py => 11_upload_download.py} | 0 ...anipulation.py => 12_file_manipulation.py} | 0 11 files changed, 196 insertions(+) create mode 100644 examples/02_create_sandbox_with_timing.py rename examples/{02_basic_commands.py => 03_basic_commands.py} (100%) rename examples/{03_streaming_output.py => 04_streaming_output.py} (100%) rename examples/{04_environment_variables.py => 05_environment_variables.py} (100%) rename examples/{05_working_directory.py => 06_working_directory.py} (100%) rename 
examples/{06_file_operations.py => 07_file_operations.py} (100%) rename examples/{07_directory_operations.py => 08_directory_operations.py} (100%) rename examples/{08_binary_files.py => 09_binary_files.py} (100%) rename examples/{09_batch_operations.py => 10_batch_operations.py} (100%) rename examples/{10_upload_download.py => 11_upload_download.py} (100%) rename examples/{11_file_manipulation.py => 12_file_manipulation.py} (100%) diff --git a/examples/02_create_sandbox_with_timing.py b/examples/02_create_sandbox_with_timing.py new file mode 100644 index 00000000..98f8f4c7 --- /dev/null +++ b/examples/02_create_sandbox_with_timing.py @@ -0,0 +1,196 @@ +#!/usr/bin/env python3 +"""Create and manage a sandbox with detailed timing information for debugging""" + +import os +import time +from datetime import datetime +from collections import defaultdict +try: + from tqdm import tqdm +except ImportError: + print("Warning: tqdm not installed. Install with: pip install tqdm") + print("Continuing without progress bars...\n") + tqdm = None + + +from koyeb import Sandbox + + +class TimingTracker: + """Track timing information for operations""" + def __init__(self): + self.operations = [] + self.categories = defaultdict(list) + + def record(self, name, duration, category="general"): + """Record an operation's timing""" + self.operations.append({ + 'name': name, + 'duration': duration, + 'category': category, + 'timestamp': datetime.now() + }) + self.categories[category].append(duration) + + def get_total_time(self): + """Get total time for all operations""" + return sum(op['duration'] for op in self.operations) + + def get_category_total(self, category): + """Get total time for a specific category""" + return sum(self.categories[category]) + + def print_recap(self): + """Print a detailed recap of all timings""" + print("\n" + "="*60) + print("TIMING RECAP") + print("="*60) + + if not self.operations: + print("No operations recorded") + return + + # Print individual operations + 
print("\nIndividual Operations:") + print("-" * 60) + max_name_len = max(len(op['name']) for op in self.operations) + + for op in self.operations: + bar_length = int(op['duration'] * 10) # Scale for visualization + bar = "█" * min(bar_length, 40) + print(f" {op['name']:<{max_name_len}} : {op['duration']:6.3f}s {bar}") + + # Print category summaries + if len(self.categories) > 1: + print("\nCategory Summaries:") + print("-" * 60) + for category, times in sorted(self.categories.items()): + total = sum(times) + count = len(times) + avg = total / count if count > 0 else 0 + print(f" {category.capitalize():<20} : {total:6.3f}s total, {avg:6.3f}s avg ({count} ops)") + + # Print total time with progress bar + total_time = self.get_total_time() + print("\n" + "-" * 60) + print(f" {'TOTAL TIME':<{max_name_len}} : {total_time:6.3f}s") + print("="*60) + + +def log_with_timestamp(message, start_time=None): + """Log a message with timestamp and optionally elapsed time""" + current_time = datetime.now() + timestamp = current_time.strftime("%Y-%m-%d %H:%M:%S.%f")[:-3] + + if start_time: + elapsed = time.time() - start_time + print(f"[{timestamp}] (+{elapsed:.3f}s) {message}") + else: + print(f"[{timestamp}] {message}") + + return time.time() + + +def main(): + script_start = time.time() + tracker = TimingTracker() + log_with_timestamp("=== Starting sandbox creation with timing debug ===") + + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + print("Error: KOYEB_API_TOKEN not set") + return + + sandbox = None + try: + # Create sandbox with timing + create_start = log_with_timestamp("Creating sandbox...") + sandbox = Sandbox.create( + image="python:3.11", + name="example-sandbox-timed", + wait_ready=True, + api_token=api_token, + ) + create_duration = time.time() - create_start + tracker.record("Sandbox creation", create_duration, "setup") + log_with_timestamp("Sandbox created successfully", create_start) + + # Check status with timing + status_start = 
log_with_timestamp("Checking sandbox status...") + status = sandbox.status() + is_healthy = sandbox.is_healthy() + status_duration = time.time() - status_start + tracker.record("Status check", status_duration, "monitoring") + log_with_timestamp( + f"Status check complete - Status: {status}, Healthy: {is_healthy}", + status_start + ) + + # Test command execution with timing + exec_start = log_with_timestamp("Executing test command...") + result = sandbox.exec("echo 'Sandbox is ready!'") + exec_duration = time.time() - exec_start + tracker.record("Initial exec command", exec_duration, "execution") + log_with_timestamp( + f"Command executed - Output: {result.stdout.strip()}", + exec_start + ) + + # Additional timing tests + log_with_timestamp("\n=== Running additional timing tests ===") + + # Test multiple commands + test_range = range(3) + iterator = tqdm(test_range, desc="Running test commands", unit="cmd") if tqdm else test_range + + for i in iterator: + cmd_start = log_with_timestamp(f"Running command {i+1}/3...") + result = sandbox.exec(f"echo 'Test {i+1}'") + cmd_duration = time.time() - cmd_start + tracker.record(f"Test command {i+1}", cmd_duration, "execution") + log_with_timestamp( + f"Command {i+1} completed - Output: {result.stdout.strip()}", + cmd_start + ) + + # Test a longer-running command + long_cmd_start = log_with_timestamp("Running longer command (sleep 2)...") + + # Show progress bar for long command if tqdm is available + if tqdm: + with tqdm(total=100, desc="Long command progress", bar_format='{l_bar}{bar}| {elapsed}') as pbar: + for _ in range(10): + time.sleep(0.2) + pbar.update(10) + result = sandbox.exec("sleep 2 && echo 'Done sleeping'") + long_cmd_duration = time.time() - long_cmd_start + tracker.record("Long command (sleep 2)", long_cmd_duration, "execution") + log_with_timestamp( + f"Long command completed - Output: {result.stdout.strip()}", + long_cmd_start + ) + + except Exception as e: + log_with_timestamp(f"Error occurred: {e}") + 
import traceback + traceback.print_exc() + finally: + if sandbox: + delete_start = log_with_timestamp("Deleting sandbox...") + sandbox.delete() + delete_duration = time.time() - delete_start + tracker.record("Sandbox deletion", delete_duration, "cleanup") + log_with_timestamp("Sandbox deleted successfully", delete_start) + + total_script_time = time.time() - script_start + log_with_timestamp( + f"\n=== Script completed ===", + script_start + ) + + # Print detailed recap + tracker.print_recap() + + +if __name__ == "__main__": + main() diff --git a/examples/02_basic_commands.py b/examples/03_basic_commands.py similarity index 100% rename from examples/02_basic_commands.py rename to examples/03_basic_commands.py diff --git a/examples/03_streaming_output.py b/examples/04_streaming_output.py similarity index 100% rename from examples/03_streaming_output.py rename to examples/04_streaming_output.py diff --git a/examples/04_environment_variables.py b/examples/05_environment_variables.py similarity index 100% rename from examples/04_environment_variables.py rename to examples/05_environment_variables.py diff --git a/examples/05_working_directory.py b/examples/06_working_directory.py similarity index 100% rename from examples/05_working_directory.py rename to examples/06_working_directory.py diff --git a/examples/06_file_operations.py b/examples/07_file_operations.py similarity index 100% rename from examples/06_file_operations.py rename to examples/07_file_operations.py diff --git a/examples/07_directory_operations.py b/examples/08_directory_operations.py similarity index 100% rename from examples/07_directory_operations.py rename to examples/08_directory_operations.py diff --git a/examples/08_binary_files.py b/examples/09_binary_files.py similarity index 100% rename from examples/08_binary_files.py rename to examples/09_binary_files.py diff --git a/examples/09_batch_operations.py b/examples/10_batch_operations.py similarity index 100% rename from 
examples/09_batch_operations.py rename to examples/10_batch_operations.py diff --git a/examples/10_upload_download.py b/examples/11_upload_download.py similarity index 100% rename from examples/10_upload_download.py rename to examples/11_upload_download.py diff --git a/examples/11_file_manipulation.py b/examples/12_file_manipulation.py similarity index 100% rename from examples/11_file_manipulation.py rename to examples/12_file_manipulation.py From b0157432f72f3b7412018cfe2ac08bba2f0ed711 Mon Sep 17 00:00:00 2001 From: Bastien Chatelard Date: Tue, 4 Nov 2025 09:33:22 +0100 Subject: [PATCH 09/47] Add tqdm for the examples --- pyproject.toml | 1 + uv.lock | 14 ++++++++++++++ 2 files changed, 15 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 4a083f8d..95189c3d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,6 +44,7 @@ dev = [ "flake8>=7.3.0", "pydoc-markdown>=4.8.2", "pytest>=8.4.2", + "tqdm>=4.67.1", ] [tool.pylint.'MESSAGES CONTROL'] diff --git a/uv.lock b/uv.lock index d6ddba70..c1c513ac 100644 --- a/uv.lock +++ b/uv.lock @@ -392,6 +392,7 @@ dev = [ { name = "flake8" }, { name = "pydoc-markdown" }, { name = "pytest" }, + { name = "tqdm" }, ] [package.metadata] @@ -409,6 +410,7 @@ dev = [ { name = "flake8", specifier = ">=7.3.0" }, { name = "pydoc-markdown", specifier = ">=4.8.2" }, { name = "pytest", specifier = ">=8.4.2" }, + { name = "tqdm", specifier = ">=4.67.1" }, ] [[package]] @@ -997,6 +999,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c7/18/c86eb8e0202e32dd3df50d43d7ff9854f8e0603945ff398974c1d91ac1ef/tomli_w-1.2.0-py3-none-any.whl", hash = "sha256:188306098d013b691fcadc011abd66727d3c414c571bb01b1a174ba8c983cf90", size = 6675, upload-time = "2025-01-15T12:07:22.074Z" }, ] +[[package]] +name = "tqdm" +version = "4.67.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737, upload-time = "2024-11-24T20:12:22.481Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" }, +] + [[package]] name = "typeapi" version = "2.3.0" From bdb1f98e7b4794545999c9a58a6278256dcdd3e1 Mon Sep 17 00:00:00 2001 From: Bastien Chatelard Date: Tue, 4 Nov 2025 09:41:55 +0100 Subject: [PATCH 10/47] Update 02_create_sandbox_with_timing.py --- examples/02_create_sandbox_with_timing.py | 174 +++++++++------------- 1 file changed, 72 insertions(+), 102 deletions(-) diff --git a/examples/02_create_sandbox_with_timing.py b/examples/02_create_sandbox_with_timing.py index 98f8f4c7..d8832ede 100644 --- a/examples/02_create_sandbox_with_timing.py +++ b/examples/02_create_sandbox_with_timing.py @@ -1,16 +1,11 @@ #!/usr/bin/env python3 """Create and manage a sandbox with detailed timing information for debugging""" +import argparse import os import time -from datetime import datetime from collections import defaultdict -try: - from tqdm import tqdm -except ImportError: - print("Warning: tqdm not installed. 
Install with: pip install tqdm") - print("Continuing without progress bars...\n") - tqdm = None +from datetime import datetime from koyeb import Sandbox @@ -42,59 +37,37 @@ def get_category_total(self, category): def print_recap(self): """Print a detailed recap of all timings""" - print("\n" + "="*60) - print("TIMING RECAP") - print("="*60) + print("\n" + "="*70) + print(" TIMING SUMMARY") + print("="*70) if not self.operations: print("No operations recorded") return + total_time = self.get_total_time() + # Print individual operations - print("\nIndividual Operations:") - print("-" * 60) - max_name_len = max(len(op['name']) for op in self.operations) + print() for op in self.operations: - bar_length = int(op['duration'] * 10) # Scale for visualization - bar = "█" * min(bar_length, 40) - print(f" {op['name']:<{max_name_len}} : {op['duration']:6.3f}s {bar}") - - # Print category summaries - if len(self.categories) > 1: - print("\nCategory Summaries:") - print("-" * 60) - for category, times in sorted(self.categories.items()): - total = sum(times) - count = len(times) - avg = total / count if count > 0 else 0 - print(f" {category.capitalize():<20} : {total:6.3f}s total, {avg:6.3f}s avg ({count} ops)") + percentage = (op['duration'] / total_time * 100) if total_time > 0 else 0 + bar_length = int(percentage / 2) # 50 chars = 100% + bar = "█" * bar_length + + print(f" {op['name']:<30} {op['duration']:6.2f}s {percentage:5.1f}% {bar}") - # Print total time with progress bar - total_time = self.get_total_time() - print("\n" + "-" * 60) - print(f" {'TOTAL TIME':<{max_name_len}} : {total_time:6.3f}s") - print("="*60) - - -def log_with_timestamp(message, start_time=None): - """Log a message with timestamp and optionally elapsed time""" - current_time = datetime.now() - timestamp = current_time.strftime("%Y-%m-%d %H:%M:%S.%f")[:-3] - - if start_time: - elapsed = time.time() - start_time - print(f"[{timestamp}] (+{elapsed:.3f}s) {message}") - else: - print(f"[{timestamp}] 
{message}") - - return time.time() + print() + print("-" * 70) + print(f" {'TOTAL':<30} {total_time:6.2f}s 100.0%") + print("="*70) -def main(): +def main(run_long_tests=False): script_start = time.time() tracker = TimingTracker() - log_with_timestamp("=== Starting sandbox creation with timing debug ===") + + print("Starting sandbox operations...") api_token = os.getenv("KOYEB_API_TOKEN") if not api_token: @@ -104,7 +77,8 @@ def main(): sandbox = None try: # Create sandbox with timing - create_start = log_with_timestamp("Creating sandbox...") + print(" → Creating sandbox...") + create_start = time.time() sandbox = Sandbox.create( image="python:3.11", name="example-sandbox-timed", @@ -113,84 +87,80 @@ def main(): ) create_duration = time.time() - create_start tracker.record("Sandbox creation", create_duration, "setup") - log_with_timestamp("Sandbox created successfully", create_start) + print(f" ✓ took {create_duration:.1f}s") # Check status with timing - status_start = log_with_timestamp("Checking sandbox status...") + print(" → Checking sandbox status...") + status_start = time.time() status = sandbox.status() is_healthy = sandbox.is_healthy() status_duration = time.time() - status_start tracker.record("Status check", status_duration, "monitoring") - log_with_timestamp( - f"Status check complete - Status: {status}, Healthy: {is_healthy}", - status_start - ) + print(f" ✓ took {status_duration:.1f}s") # Test command execution with timing - exec_start = log_with_timestamp("Executing test command...") + print(" → Executing initial test command...") + exec_start = time.time() result = sandbox.exec("echo 'Sandbox is ready!'") exec_duration = time.time() - exec_start tracker.record("Initial exec command", exec_duration, "execution") - log_with_timestamp( - f"Command executed - Output: {result.stdout.strip()}", - exec_start - ) - - # Additional timing tests - log_with_timestamp("\n=== Running additional timing tests ===") - - # Test multiple commands - test_range = 
range(3) - iterator = tqdm(test_range, desc="Running test commands", unit="cmd") if tqdm else test_range - - for i in iterator: - cmd_start = log_with_timestamp(f"Running command {i+1}/3...") - result = sandbox.exec(f"echo 'Test {i+1}'") - cmd_duration = time.time() - cmd_start - tracker.record(f"Test command {i+1}", cmd_duration, "execution") - log_with_timestamp( - f"Command {i+1} completed - Output: {result.stdout.strip()}", - cmd_start - ) - - # Test a longer-running command - long_cmd_start = log_with_timestamp("Running longer command (sleep 2)...") - - # Show progress bar for long command if tqdm is available - if tqdm: - with tqdm(total=100, desc="Long command progress", bar_format='{l_bar}{bar}| {elapsed}') as pbar: - for _ in range(10): - time.sleep(0.2) - pbar.update(10) - result = sandbox.exec("sleep 2 && echo 'Done sleeping'") - long_cmd_duration = time.time() - long_cmd_start - tracker.record("Long command (sleep 2)", long_cmd_duration, "execution") - log_with_timestamp( - f"Long command completed - Output: {result.stdout.strip()}", - long_cmd_start - ) + print(f" ✓ took {exec_duration:.1f}s") + + if run_long_tests: + # Long test 1: Install a package + print(" → [LONG TEST] Installing a package...") + install_start = time.time() + result = sandbox.exec("pip install requests") + install_duration = time.time() - install_start + tracker.record("Package installation", install_duration, "long_tests") + print(f" ✓ took {install_duration:.1f}s") + + # Long test 2: Run a computation + print(" → [LONG TEST] Running computation...") + compute_start = time.time() + result = sandbox.exec("python -c 'import time; sum(range(10000000)); time.sleep(2)'") + compute_duration = time.time() - compute_start + tracker.record("Heavy computation", compute_duration, "long_tests") + print(f" ✓ took {compute_duration:.1f}s") + + # Long test 3: Multiple status checks + print(" → [LONG TEST] Multiple status checks...") + multi_check_start = time.time() + for i in range(5): + 
sandbox.status() + time.sleep(0.5) + multi_check_duration = time.time() - multi_check_start + tracker.record("Multiple status checks (5x)", multi_check_duration, "long_tests") + print(f" ✓ took {multi_check_duration:.1f}s") except Exception as e: - log_with_timestamp(f"Error occurred: {e}") + print(f"\n✗ Error occurred: {e}") import traceback traceback.print_exc() finally: if sandbox: - delete_start = log_with_timestamp("Deleting sandbox...") + print(" → Deleting sandbox...") + delete_start = time.time() sandbox.delete() delete_duration = time.time() - delete_start tracker.record("Sandbox deletion", delete_duration, "cleanup") - log_with_timestamp("Sandbox deleted successfully", delete_start) + print(f" ✓ took {delete_duration:.1f}s") - total_script_time = time.time() - script_start - log_with_timestamp( - f"\n=== Script completed ===", - script_start - ) + print("\n✓ All operations completed") # Print detailed recap tracker.print_recap() if __name__ == "__main__": - main() + parser = argparse.ArgumentParser( + description="Create and manage a sandbox with detailed timing information" + ) + parser.add_argument( + "--long", + action="store_true", + help="Run longer tests (package installation, computation, etc.)" + ) + + args = parser.parse_args() + main(run_long_tests=args.long) From 9fd75ccbacb2d5a28fdad15ddf0c25ce345f1e12 Mon Sep 17 00:00:00 2001 From: Bastien Chatelard Date: Tue, 4 Nov 2025 09:54:24 +0100 Subject: [PATCH 11/47] Use async filesystem for async sandbox --- koyeb/sandbox/sandbox.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/koyeb/sandbox/sandbox.py b/koyeb/sandbox/sandbox.py index c7d14b83..afe639fd 100644 --- a/koyeb/sandbox/sandbox.py +++ b/koyeb/sandbox/sandbox.py @@ -360,3 +360,11 @@ def exec(self): from .exec import AsyncSandboxExecutor return AsyncSandboxExecutor(self) + + @property + def filesystem(self): + """Get filesystem operations interface""" + from .filesystem import AsyncSandboxFilesystem + + return 
AsyncSandboxFilesystem(self) + From e9681096b26c13c054c4394464bac8b2f4bb29e5 Mon Sep 17 00:00:00 2001 From: Bastien Chatelard Date: Tue, 4 Nov 2025 09:54:42 +0100 Subject: [PATCH 12/47] Create async variant for each examples --- examples/01_create_sandbox_async.py | 42 +++++ .../02_create_sandbox_with_timing_async.py | 167 ++++++++++++++++++ examples/03_basic_commands_async.py | 51 ++++++ examples/04_streaming_output_async.py | 58 ++++++ examples/05_environment_variables_async.py | 45 +++++ examples/06_working_directory_async.py | 49 +++++ examples/07_file_operations_async.py | 50 ++++++ examples/08_directory_operations_async.py | 57 ++++++ examples/09_binary_files_async.py | 48 +++++ examples/10_batch_operations_async.py | 61 +++++++ examples/11_upload_download_async.py | 64 +++++++ examples/12_file_manipulation_async.py | 66 +++++++ 12 files changed, 758 insertions(+) create mode 100644 examples/01_create_sandbox_async.py create mode 100644 examples/02_create_sandbox_with_timing_async.py create mode 100644 examples/03_basic_commands_async.py create mode 100644 examples/04_streaming_output_async.py create mode 100644 examples/05_environment_variables_async.py create mode 100644 examples/06_working_directory_async.py create mode 100644 examples/07_file_operations_async.py create mode 100644 examples/08_directory_operations_async.py create mode 100644 examples/09_binary_files_async.py create mode 100644 examples/10_batch_operations_async.py create mode 100644 examples/11_upload_download_async.py create mode 100644 examples/12_file_manipulation_async.py diff --git a/examples/01_create_sandbox_async.py b/examples/01_create_sandbox_async.py new file mode 100644 index 00000000..5563b780 --- /dev/null +++ b/examples/01_create_sandbox_async.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python3 +"""Create and manage a sandbox (async variant)""" + +import asyncio +import os + +from koyeb import AsyncSandbox + + +async def main(): + api_token = os.getenv("KOYEB_API_TOKEN") + if not 
api_token: + print("Error: KOYEB_API_TOKEN not set") + return + + sandbox = None + try: + sandbox = await AsyncSandbox.create( + image="python:3.11", + name="example-sandbox", + wait_ready=True, + api_token=api_token, + ) + + # Check status + status = await sandbox.status() + is_healthy = await sandbox.is_healthy() + print(f"Status: {status}, Healthy: {is_healthy}") + + # Test command + result = await sandbox.exec("echo 'Sandbox is ready!'") + print(result.stdout.strip()) + + except Exception as e: + print(f"Error: {e}") + finally: + if sandbox: + await sandbox.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/02_create_sandbox_with_timing_async.py b/examples/02_create_sandbox_with_timing_async.py new file mode 100644 index 00000000..2abe37c9 --- /dev/null +++ b/examples/02_create_sandbox_with_timing_async.py @@ -0,0 +1,167 @@ +#!/usr/bin/env python3 +"""Create and manage a sandbox with detailed timing information for debugging (async variant)""" + +import argparse +import asyncio +import os +import time +from collections import defaultdict +from datetime import datetime + + +from koyeb import AsyncSandbox + + +class TimingTracker: + """Track timing information for operations""" + def __init__(self): + self.operations = [] + self.categories = defaultdict(list) + + def record(self, name, duration, category="general"): + """Record an operation's timing""" + self.operations.append({ + 'name': name, + 'duration': duration, + 'category': category, + 'timestamp': datetime.now() + }) + self.categories[category].append(duration) + + def get_total_time(self): + """Get total time for all operations""" + return sum(op['duration'] for op in self.operations) + + def get_category_total(self, category): + """Get total time for a specific category""" + return sum(self.categories[category]) + + def print_recap(self): + """Print a detailed recap of all timings""" + print("\n" + "="*70) + print(" TIMING SUMMARY") + print("="*70) + + if not 
self.operations: + print("No operations recorded") + return + + total_time = self.get_total_time() + + # Print individual operations + print() + + for op in self.operations: + percentage = (op['duration'] / total_time * 100) if total_time > 0 else 0 + bar_length = int(percentage / 2) # 50 chars = 100% + bar = "█" * bar_length + + print(f" {op['name']:<30} {op['duration']:6.2f}s {percentage:5.1f}% {bar}") + + print() + print("-" * 70) + print(f" {'TOTAL':<30} {total_time:6.2f}s 100.0%") + print("="*70) + + +async def main(run_long_tests=False): + script_start = time.time() + tracker = TimingTracker() + + print("Starting sandbox operations...") + + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + print("Error: KOYEB_API_TOKEN not set") + return + + sandbox = None + try: + # Create sandbox with timing + print(" → Creating sandbox...") + create_start = time.time() + sandbox = await AsyncSandbox.create( + image="python:3.11", + name="example-sandbox-timed", + wait_ready=True, + api_token=api_token, + ) + create_duration = time.time() - create_start + tracker.record("Sandbox creation", create_duration, "setup") + print(f" ✓ took {create_duration:.1f}s") + + # Check status with timing + print(" → Checking sandbox status...") + status_start = time.time() + status = await sandbox.status() + is_healthy = await sandbox.is_healthy() + status_duration = time.time() - status_start + tracker.record("Status check", status_duration, "monitoring") + print(f" ✓ took {status_duration:.1f}s") + + # Test command execution with timing + print(" → Executing initial test command...") + exec_start = time.time() + result = await sandbox.exec("echo 'Sandbox is ready!'") + exec_duration = time.time() - exec_start + tracker.record("Initial exec command", exec_duration, "execution") + print(f" ✓ took {exec_duration:.1f}s") + + if run_long_tests: + # Long test 1: Install a package + print(" → [LONG TEST] Installing a package...") + install_start = time.time() + result = await 
sandbox.exec("pip install requests") + install_duration = time.time() - install_start + tracker.record("Package installation", install_duration, "long_tests") + print(f" ✓ took {install_duration:.1f}s") + + # Long test 2: Run a computation + print(" → [LONG TEST] Running computation...") + compute_start = time.time() + result = await sandbox.exec("python -c 'import time; sum(range(10000000)); time.sleep(2)'") + compute_duration = time.time() - compute_start + tracker.record("Heavy computation", compute_duration, "long_tests") + print(f" ✓ took {compute_duration:.1f}s") + + # Long test 3: Multiple status checks + print(" → [LONG TEST] Multiple status checks...") + multi_check_start = time.time() + for i in range(5): + await sandbox.status() + await asyncio.sleep(0.5) + multi_check_duration = time.time() - multi_check_start + tracker.record("Multiple status checks (5x)", multi_check_duration, "long_tests") + print(f" ✓ took {multi_check_duration:.1f}s") + + except Exception as e: + print(f"\n✗ Error occurred: {e}") + import traceback + traceback.print_exc() + finally: + if sandbox: + print(" → Deleting sandbox...") + delete_start = time.time() + await sandbox.delete() + delete_duration = time.time() - delete_start + tracker.record("Sandbox deletion", delete_duration, "cleanup") + print(f" ✓ took {delete_duration:.1f}s") + + print("\n✓ All operations completed") + + # Print detailed recap + tracker.print_recap() + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description="Create and manage a sandbox with detailed timing information" + ) + parser.add_argument( + "--long", + action="store_true", + help="Run longer tests (package installation, computation, etc.)" + ) + + args = parser.parse_args() + asyncio.run(main(run_long_tests=args.long)) diff --git a/examples/03_basic_commands_async.py b/examples/03_basic_commands_async.py new file mode 100644 index 00000000..3bb198dc --- /dev/null +++ b/examples/03_basic_commands_async.py @@ -0,0 +1,51 @@ 
+#!/usr/bin/env python3 +"""Basic command execution (async variant)""" + +import asyncio +import os + +from koyeb import AsyncSandbox + + +async def main(): + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + print("Error: KOYEB_API_TOKEN not set") + return + + sandbox = None + try: + sandbox = await AsyncSandbox.create( + image="python:3.11", + name="basic-commands", + wait_ready=True, + api_token=api_token, + ) + + # Simple command + result = await sandbox.exec("echo 'Hello World'") + print(result.stdout.strip()) + + # Python command + result = await sandbox.exec("python3 -c 'print(2 + 2)'") + print(result.stdout.strip()) + + # Multi-line Python script + result = await sandbox.exec( + '''python3 -c " +import sys +print(f'Python version: {sys.version.split()[0]}') +print(f'Platform: {sys.platform}') +"''' + ) + print(result.stdout.strip()) + + except Exception as e: + print(f"Error: {e}") + finally: + if sandbox: + await sandbox.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/04_streaming_output_async.py b/examples/04_streaming_output_async.py new file mode 100644 index 00000000..797f6de3 --- /dev/null +++ b/examples/04_streaming_output_async.py @@ -0,0 +1,58 @@ +#!/usr/bin/env python3 +"""Streaming command output (async variant)""" + +import asyncio +import os + +from koyeb import AsyncSandbox + + +async def main(): + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + print("Error: KOYEB_API_TOKEN not set") + return + + sandbox = None + try: + sandbox = await AsyncSandbox.create( + image="python:3.11", + name="streaming", + wait_ready=True, + api_token=api_token, + ) + + # Stream output in real-time + result = await sandbox.exec( + '''python3 -c " +import time +for i in range(5): + print(f'Line {i+1}') + time.sleep(0.5) +"''', + on_stdout=lambda data: print(data.strip(), end=" "), + on_stderr=lambda data: print(f"ERR: {data.strip()}"), + ) + print(f"\nExit code: {result.exit_code}") + + # Stream a 
script + await sandbox.filesystem.write_file( + "/tmp/counter.py", + "#!/usr/bin/env python3\nimport time\nfor i in range(1, 6):\n print(f'Count: {i}')\n time.sleep(0.3)\nprint('Done!')\n", + ) + await sandbox.exec("chmod +x /tmp/counter.py") + + result = await sandbox.exec( + "python3 /tmp/counter.py", + on_stdout=lambda data: print(data.strip()), + ) + + except Exception as e: + print(f"Error: {e}") + finally: + if sandbox: + await sandbox.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/05_environment_variables_async.py b/examples/05_environment_variables_async.py new file mode 100644 index 00000000..e46cd7ca --- /dev/null +++ b/examples/05_environment_variables_async.py @@ -0,0 +1,45 @@ +#!/usr/bin/env python3 +"""Environment variables in commands (async variant)""" + +import asyncio +import os + +from koyeb import AsyncSandbox + + +async def main(): + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + print("Error: KOYEB_API_TOKEN not set") + return + + sandbox = None + try: + sandbox = await AsyncSandbox.create( + image="python:3.11", + name="env-vars", + wait_ready=True, + api_token=api_token, + ) + + # Set environment variables + env_vars = {"MY_VAR": "Hello", "DEBUG": "true"} + result = await sandbox.exec("env | grep MY_VAR", env=env_vars) + print(result.stdout.strip()) + + # Use in Python command + result = await sandbox.exec( + 'python3 -c "import os; print(os.getenv(\'MY_VAR\'))"', + env={"MY_VAR": "Hello from Python!"}, + ) + print(result.stdout.strip()) + + except Exception as e: + print(f"Error: {e}") + finally: + if sandbox: + await sandbox.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/06_working_directory_async.py b/examples/06_working_directory_async.py new file mode 100644 index 00000000..5222c858 --- /dev/null +++ b/examples/06_working_directory_async.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python3 +"""Working directory for commands (async variant)""" + +import 
asyncio +import os + +from koyeb import AsyncSandbox + + +async def main(): + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + print("Error: KOYEB_API_TOKEN not set") + return + + sandbox = None + try: + sandbox = await AsyncSandbox.create( + image="python:3.11", + name="working-dir", + wait_ready=True, + api_token=api_token, + ) + + # Setup: create directory structure + await sandbox.exec("mkdir -p /tmp/my_project/src") + await sandbox.exec("echo 'print(\\\"hello\\\")' > /tmp/my_project/src/main.py") + + # Run command in specific directory + result = await sandbox.exec("pwd", cwd="/tmp/my_project") + print(result.stdout.strip()) + + # List files in working directory + result = await sandbox.exec("ls -la", cwd="/tmp/my_project") + print(result.stdout.strip()) + + # Use relative paths + result = await sandbox.exec("cat src/main.py", cwd="/tmp/my_project") + print(result.stdout.strip()) + + except Exception as e: + print(f"Error: {e}") + finally: + if sandbox: + await sandbox.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/07_file_operations_async.py b/examples/07_file_operations_async.py new file mode 100644 index 00000000..93cb6c1f --- /dev/null +++ b/examples/07_file_operations_async.py @@ -0,0 +1,50 @@ +#!/usr/bin/env python3 +"""Basic file operations (async variant)""" + +import asyncio +import os + +from koyeb import AsyncSandbox + + +async def main(): + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + print("Error: KOYEB_API_TOKEN not set") + return + + sandbox = None + try: + sandbox = await AsyncSandbox.create( + image="python:3.11", + name="file-ops", + wait_ready=True, + api_token=api_token, + ) + + fs = sandbox.filesystem + + # Write file + content = "Hello, Koyeb Sandbox!\nThis is a test file." 
+ await fs.write_file("/tmp/hello.txt", content) + + # Read file + file_info = await fs.read_file("/tmp/hello.txt") + print(file_info.content) + + # Write Python script + python_code = "#!/usr/bin/env python3\nprint('Hello from Python!')\n" + await fs.write_file("/tmp/script.py", python_code) + await sandbox.exec("chmod +x /tmp/script.py") + result = await sandbox.exec("/tmp/script.py") + print(result.stdout.strip()) + + except Exception as e: + print(f"Error: {e}") + finally: + if sandbox: + await sandbox.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/08_directory_operations_async.py b/examples/08_directory_operations_async.py new file mode 100644 index 00000000..9f05cd8e --- /dev/null +++ b/examples/08_directory_operations_async.py @@ -0,0 +1,57 @@ +#!/usr/bin/env python3 +"""Directory operations (async variant)""" + +import asyncio +import os + +from koyeb import AsyncSandbox + + +async def main(): + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + print("Error: KOYEB_API_TOKEN not set") + return + + sandbox = None + try: + sandbox = await AsyncSandbox.create( + image="python:3.11", + name="directory-ops", + wait_ready=True, + api_token=api_token, + ) + + fs = sandbox.filesystem + + # Create directory + await fs.mkdir("/tmp/my_project") + + # Create nested directories + await fs.mkdir("/tmp/my_project/src/utils", recursive=True) + + # List directory + contents = await fs.list_dir("/tmp/my_project") + print(f"Contents: {contents}") + + # Create project structure + await fs.mkdir("/tmp/my_project/src", recursive=True) + await fs.mkdir("/tmp/my_project/tests", recursive=True) + await fs.write_file("/tmp/my_project/src/main.py", "print('Hello')") + await fs.write_file("/tmp/my_project/README.md", "# My Project") + + # Check if path exists + exists = await fs.exists("/tmp/my_project") + is_dir = await fs.is_dir("/tmp/my_project") + is_file = await fs.is_file("/tmp/my_project/src/main.py") + print(f"Exists: {exists}, 
Is dir: {is_dir}, Is file: {is_file}") + + except Exception as e: + print(f"Error: {e}") + finally: + if sandbox: + await sandbox.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/09_binary_files_async.py b/examples/09_binary_files_async.py new file mode 100644 index 00000000..495eceab --- /dev/null +++ b/examples/09_binary_files_async.py @@ -0,0 +1,48 @@ +#!/usr/bin/env python3 +"""Binary file operations (async variant)""" + +import asyncio +import base64 +import os + +from koyeb import AsyncSandbox + + +async def main(): + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + print("Error: KOYEB_API_TOKEN not set") + return + + sandbox = None + try: + sandbox = await AsyncSandbox.create( + image="python:3.11", + name="binary-files", + wait_ready=True, + api_token=api_token, + ) + + fs = sandbox.filesystem + + # Write binary data + binary_data = b"Binary data: \x00\x01\x02\x03\xff\xfe\xfd" + base64_data = base64.b64encode(binary_data).decode("utf-8") + await fs.write_file("/tmp/binary.bin", base64_data, encoding="base64") + + # Read binary data + file_info = await fs.read_file("/tmp/binary.bin", encoding="base64") + decoded = base64.b64decode(file_info.content) + print(f"Original: {binary_data}") + print(f"Decoded: {decoded}") + assert binary_data == decoded + + except Exception as e: + print(f"Error: {e}") + finally: + if sandbox: + await sandbox.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/10_batch_operations_async.py b/examples/10_batch_operations_async.py new file mode 100644 index 00000000..51ed7e12 --- /dev/null +++ b/examples/10_batch_operations_async.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python3 +"""Batch file operations (async variant)""" + +import asyncio +import os + +from koyeb import AsyncSandbox + + +async def main(): + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + print("Error: KOYEB_API_TOKEN not set") + return + + sandbox = None + try: + sandbox = 
await AsyncSandbox.create( + image="python:3.11", + name="batch-ops", + wait_ready=True, + api_token=api_token, + ) + + fs = sandbox.filesystem + + # Write multiple files at once + files_to_create = [ + {"path": "/tmp/file1.txt", "content": "Content of file 1"}, + {"path": "/tmp/file2.txt", "content": "Content of file 2"}, + {"path": "/tmp/file3.txt", "content": "Content of file 3"}, + ] + + await fs.write_files(files_to_create) + print("Created 3 files") + + # Verify + created_files = await fs.ls("/tmp") + batch_files = [f for f in created_files if f.startswith("file")] + print(f"Files: {batch_files}") + + # Create project structure + project_files = [ + {"path": "/tmp/project/main.py", "content": "print('Hello')"}, + {"path": "/tmp/project/utils.py", "content": "def helper(): pass"}, + {"path": "/tmp/project/README.md", "content": "# My Project"}, + ] + + await fs.mkdir("/tmp/project", recursive=True) + await fs.write_files(project_files) + print("Created project structure") + + except Exception as e: + print(f"Error: {e}") + finally: + if sandbox: + await sandbox.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/11_upload_download_async.py b/examples/11_upload_download_async.py new file mode 100644 index 00000000..f32fcbd3 --- /dev/null +++ b/examples/11_upload_download_async.py @@ -0,0 +1,64 @@ +#!/usr/bin/env python3 +"""Upload and download files (async variant)""" + +import asyncio +import os +import tempfile + +from koyeb import AsyncSandbox + + +async def main(): + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + print("Error: KOYEB_API_TOKEN not set") + return + + sandbox = None + try: + sandbox = await AsyncSandbox.create( + image="python:3.11", + name="upload-download", + wait_ready=True, + api_token=api_token, + ) + + fs = sandbox.filesystem + + # Upload local file to sandbox + with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".txt") as f: + f.write("This is a local file\n") + 
f.write("Uploaded to Koyeb Sandbox!") + local_file = f.name + + try: + await fs.upload_file(local_file, "/tmp/uploaded_file.txt") + uploaded_info = await fs.read_file("/tmp/uploaded_file.txt") + print(uploaded_info.content) + finally: + os.unlink(local_file) + + # Download file from sandbox + await fs.write_file( + "/tmp/download_source.txt", "Download test content\nMultiple lines" + ) + + with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix="_downloaded.txt") as f: + download_path = f.name + + try: + await fs.download_file("/tmp/download_source.txt", download_path) + with open(download_path, "r") as f: + print(f.read()) + finally: + os.unlink(download_path) + + except Exception as e: + print(f"Error: {e}") + finally: + if sandbox: + await sandbox.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/12_file_manipulation_async.py b/examples/12_file_manipulation_async.py new file mode 100644 index 00000000..5afb8abe --- /dev/null +++ b/examples/12_file_manipulation_async.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python3 +"""File manipulation operations (async variant)""" + +import asyncio +import os + +from koyeb import AsyncSandbox + + +async def main(): + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + print("Error: KOYEB_API_TOKEN not set") + return + + sandbox = None + try: + sandbox = await AsyncSandbox.create( + image="python:3.11", + name="file-manip", + wait_ready=True, + api_token=api_token, + ) + + fs = sandbox.filesystem + + # Setup + await fs.write_file("/tmp/file1.txt", "Content of file 1") + await fs.write_file("/tmp/file2.txt", "Content of file 2") + await fs.mkdir("/tmp/test_dir", recursive=True) + + # Rename file + await fs.rename_file("/tmp/file1.txt", "/tmp/renamed_file.txt") + renamed_exists = await fs.exists("/tmp/renamed_file.txt") + print(f"Renamed: {renamed_exists}") + + # Move file + await fs.move_file("/tmp/file2.txt", "/tmp/test_dir/moved_file.txt") + moved_exists = await 
fs.exists("/tmp/test_dir/moved_file.txt") + print(f"Moved: {moved_exists}") + + # Copy file (read + write) + original_content = await fs.read_file("/tmp/renamed_file.txt") + await fs.write_file("/tmp/test_dir/copied_file.txt", original_content.content) + copied_exists = await fs.exists("/tmp/test_dir/copied_file.txt") + print(f"Copied: {copied_exists}") + + # Delete file + await fs.rm("/tmp/renamed_file.txt") + deleted_check = not await fs.exists("/tmp/renamed_file.txt") + print(f"Deleted: {deleted_check}") + + # Delete directory + await fs.rm("/tmp/test_dir", recursive=True) + dir_deleted_check = not await fs.exists("/tmp/test_dir") + print(f"Directory deleted: {dir_deleted_check}") + + except Exception as e: + print(f"Error: {e}") + finally: + if sandbox: + await sandbox.delete() + + +if __name__ == "__main__": + asyncio.run(main()) From 20869223b7a3b57a24c619c3f7bb4422bad3f2c7 Mon Sep 17 00:00:00 2001 From: Bastien Chatelard Date: Tue, 4 Nov 2025 09:55:32 +0100 Subject: [PATCH 13/47] Add run_all utility to run all examples --- examples/00_run_all.py | 157 +++++++++++++++++++++++++++++++++++ examples/00_run_all_async.py | 157 +++++++++++++++++++++++++++++++++++ 2 files changed, 314 insertions(+) create mode 100644 examples/00_run_all.py create mode 100644 examples/00_run_all_async.py diff --git a/examples/00_run_all.py b/examples/00_run_all.py new file mode 100644 index 00000000..b238d554 --- /dev/null +++ b/examples/00_run_all.py @@ -0,0 +1,157 @@ +#!/usr/bin/env python3 +"""Run all synchronous example scripts in order""" + +import os +import subprocess +import sys +import time +from pathlib import Path + + +def main(): + # Get the examples directory + examples_dir = Path(__file__).parent + + # Find all Python files, excluding this script and async variants + example_files = sorted([ + f for f in examples_dir.glob("*.py") + if f.name not in ["00_run_all.py", "00_async_run_all.py"] + and not f.name.endswith("_async.py") + ]) + + if not example_files: + print("No 
example files found to run") + return 0 + + print(f"Found {len(example_files)} example(s) to run\n") + print("=" * 70) + + total_start = time.time() + results = [] + + for example_file in example_files: + example_name = example_file.name + print(f"\n▶ Running: {example_name}") + print("-" * 70) + + start_time = time.time() + + try: + # Run the example script + result = subprocess.run( + [sys.executable, str(example_file)], + capture_output=True, + text=True, + timeout=60 # 60 second timeout per script + ) + + elapsed_time = time.time() - start_time + + # Print output + if result.stdout: + print(result.stdout) + + # Check for errors + if result.returncode != 0: + print(f"\n❌ ERROR in {example_name}") + if result.stderr: + print("STDERR:") + print(result.stderr) + + results.append({ + "name": example_name, + "status": "FAILED", + "time": elapsed_time, + "error": result.stderr or "Non-zero exit code" + }) + + # Break on error + print("\n" + "=" * 70) + print("STOPPING: Error encountered") + print("=" * 70) + print_summary(results, time.time() - total_start) + return 1 + else: + results.append({ + "name": example_name, + "status": "PASSED", + "time": elapsed_time + }) + print(f"✓ Completed in {elapsed_time:.2f}s") + + except subprocess.TimeoutExpired: + elapsed_time = time.time() - start_time + print(f"\n❌ TIMEOUT in {example_name} after {elapsed_time:.2f}s") + + results.append({ + "name": example_name, + "status": "TIMEOUT", + "time": elapsed_time, + "error": "Script exceeded 60 second timeout" + }) + + # Break on timeout + print("\n" + "=" * 70) + print("STOPPING: Timeout encountered") + print("=" * 70) + print_summary(results, time.time() - total_start) + return 1 + + except Exception as e: + elapsed_time = time.time() - start_time + print(f"\n❌ EXCEPTION in {example_name}: {e}") + + results.append({ + "name": example_name, + "status": "ERROR", + "time": elapsed_time, + "error": str(e) + }) + + # Break on exception + print("\n" + "=" * 70) + print("STOPPING: 
Exception encountered") + print("=" * 70) + print_summary(results, time.time() - total_start) + return 1 + + total_time = time.time() - total_start + + # Print summary + print("\n" + "=" * 70) + print("ALL EXAMPLES COMPLETED SUCCESSFULLY") + print("=" * 70) + print_summary(results, total_time) + + return 0 + + +def print_summary(results, total_time): + """Print execution summary""" + print("\n📊 EXECUTION SUMMARY") + print("-" * 70) + + for result in results: + status_symbol = { + "PASSED": "✓", + "FAILED": "❌", + "TIMEOUT": "⏱", + "ERROR": "❌" + }.get(result["status"], "?") + + print(f"{status_symbol} {result['name']:40s} {result['time']:>6.2f}s {result['status']}") + + if "error" in result: + error_preview = result["error"].split("\n")[0][:50] + print(f" Error: {error_preview}") + + print("-" * 70) + print(f"Total execution time: {total_time:.2f}s") + + passed = sum(1 for r in results if r["status"] == "PASSED") + total = len(results) + print(f"Results: {passed}/{total} passed") + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/examples/00_run_all_async.py b/examples/00_run_all_async.py new file mode 100644 index 00000000..3a899e9b --- /dev/null +++ b/examples/00_run_all_async.py @@ -0,0 +1,157 @@ +#!/usr/bin/env python3 +"""Run all asynchronous example scripts in order""" + +import asyncio +import os +import subprocess +import sys +import time +from pathlib import Path + + +async def run_example(example_file): + """Run a single example script and return results""" + example_name = example_file.name + print(f"\n▶ Running: {example_name}") + print("-" * 70) + + start_time = time.time() + + try: + # Run the example script + process = await asyncio.create_subprocess_exec( + sys.executable, + str(example_file), + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE + ) + + try: + stdout, stderr = await asyncio.wait_for( + process.communicate(), + timeout=60 # 60 second timeout per script + ) + except asyncio.TimeoutError: + process.kill() + 
await process.wait() + elapsed_time = time.time() - start_time + print(f"\n❌ TIMEOUT in {example_name} after {elapsed_time:.2f}s") + return { + "name": example_name, + "status": "TIMEOUT", + "time": elapsed_time, + "error": "Script exceeded 60 second timeout" + } + + elapsed_time = time.time() - start_time + + # Print output + if stdout: + print(stdout.decode()) + + # Check for errors + if process.returncode != 0: + print(f"\n❌ ERROR in {example_name}") + if stderr: + print("STDERR:") + print(stderr.decode()) + + return { + "name": example_name, + "status": "FAILED", + "time": elapsed_time, + "error": stderr.decode() if stderr else "Non-zero exit code" + } + else: + print(f"✓ Completed in {elapsed_time:.2f}s") + return { + "name": example_name, + "status": "PASSED", + "time": elapsed_time + } + + except Exception as e: + elapsed_time = time.time() - start_time + print(f"\n❌ EXCEPTION in {example_name}: {e}") + + return { + "name": example_name, + "status": "ERROR", + "time": elapsed_time, + "error": str(e) + } + + +async def main(): + # Get the examples directory + examples_dir = Path(__file__).parent + + # Find all async Python files, excluding this script + example_files = sorted([ + f for f in examples_dir.glob("*_async.py") + if f.name != "00_async_run_all.py" + ]) + + if not example_files: + print("No async example files found to run") + return 0 + + print(f"Found {len(example_files)} async example(s) to run\n") + print("=" * 70) + + total_start = time.time() + results = [] + + # Run examples sequentially to maintain order and stop on first error + for example_file in example_files: + result = await run_example(example_file) + results.append(result) + + # Break on error + if result["status"] in ["FAILED", "TIMEOUT", "ERROR"]: + print("\n" + "=" * 70) + print("STOPPING: Error encountered") + print("=" * 70) + print_summary(results, time.time() - total_start) + return 1 + + total_time = time.time() - total_start + + # Print summary + print("\n" + "=" * 70) + 
print("ALL ASYNC EXAMPLES COMPLETED SUCCESSFULLY") + print("=" * 70) + print_summary(results, total_time) + + return 0 + + +def print_summary(results, total_time): + """Print execution summary""" + print("\n📊 EXECUTION SUMMARY") + print("-" * 70) + + for result in results: + status_symbol = { + "PASSED": "✓", + "FAILED": "❌", + "TIMEOUT": "⏱", + "ERROR": "❌" + }.get(result["status"], "?") + + print(f"{status_symbol} {result['name']:40s} {result['time']:>6.2f}s {result['status']}") + + if "error" in result: + error_preview = result["error"].split("\n")[0][:50] + print(f" Error: {error_preview}") + + print("-" * 70) + print(f"Total execution time: {total_time:.2f}s") + + passed = sum(1 for r in results if r["status"] == "PASSED") + total = len(results) + print(f"Results: {passed}/{total} passed") + + +if __name__ == "__main__": + sys.exit(asyncio.run(main())) From aae6ad967dcbca5073e128ec5703302e38acd51e Mon Sep 17 00:00:00 2001 From: Bastien Chatelard Date: Tue, 4 Nov 2025 10:07:31 +0100 Subject: [PATCH 14/47] Fix rename of the script --- examples/00_run_all.py | 2 +- examples/00_run_all_async.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/00_run_all.py b/examples/00_run_all.py index b238d554..b96d4d6a 100644 --- a/examples/00_run_all.py +++ b/examples/00_run_all.py @@ -15,7 +15,7 @@ def main(): # Find all Python files, excluding this script and async variants example_files = sorted([ f for f in examples_dir.glob("*.py") - if f.name not in ["00_run_all.py", "00_async_run_all.py"] + if f.name not in ["00_run_all.py", "00_run_all.py"] and not f.name.endswith("_async.py") ]) diff --git a/examples/00_run_all_async.py b/examples/00_run_all_async.py index 3a899e9b..6007e7c4 100644 --- a/examples/00_run_all_async.py +++ b/examples/00_run_all_async.py @@ -89,7 +89,7 @@ async def main(): # Find all async Python files, excluding this script example_files = sorted([ f for f in examples_dir.glob("*_async.py") - if f.name != 
"00_async_run_all.py" + if f.name != "00_run_all_async.py" ]) if not example_files: From 042285146e956c9b9ffbcaa0a48f293b3ce20c60 Mon Sep 17 00:00:00 2001 From: Bastien Chatelard Date: Tue, 4 Nov 2025 10:27:14 +0100 Subject: [PATCH 15/47] Add sandbox documentation --- README.md | 3 +- docs/sandbox.md | 1242 ++++++++++++++++++++++++++++++++++++++ scripts/generate_docs.sh | 1 + 3 files changed, 1245 insertions(+), 1 deletion(-) create mode 100644 docs/sandbox.md diff --git a/README.md b/README.md index e3175f97..f1351e8e 100644 --- a/README.md +++ b/README.md @@ -4,4 +4,5 @@ This is the official Python SDK for Koyeb, a platform that allows you to deploy # Modules -- `koyeb.api`: Contains the API client and methods to interact with Koyeb's REST API. [Documentation](./koyeb/api_README.md) +- `koyeb.api`: Contains the API client and methods to interact with Koyeb's REST API. [Documentation](./docs/api.md) +- `koyeb.sandbox`: Contains the Sandbox module. [Documentation](./docs/sandbox.md) diff --git a/docs/sandbox.md b/docs/sandbox.md new file mode 100644 index 00000000..d24f9e9d --- /dev/null +++ b/docs/sandbox.md @@ -0,0 +1,1242 @@ + + +# koyeb/sandbox + +Koyeb Sandbox - Interactive execution environment for running arbitrary code on Koyeb + + + +# koyeb/sandbox.exec + +Command execution utilities for Koyeb Sandbox instances +Using WebSocket connection to Koyeb API + + + +## CommandStatus Objects + +```python +class CommandStatus(str, Enum) +``` + +Command execution status + + + +## CommandResult Objects + +```python +@dataclass +class CommandResult() +``` + +Result of a command execution using Koyeb API models + + + +#### success + +```python +@property +def success() -> bool +``` + +Check if command executed successfully + + + +#### output + +```python +@property +def output() -> str +``` + +Get combined stdout and stderr output + + + +## SandboxCommandError Objects + +```python +class SandboxCommandError(SandboxError) +``` + +Raised when command execution fails + + 
+ +## SandboxExecutor Objects + +```python +class SandboxExecutor() +``` + +Synchronous command execution interface for Koyeb Sandbox instances. +Bound to a specific sandbox instance. + +For async usage, use AsyncSandboxExecutor instead. + + + +#### \_\_call\_\_ + +```python +def __call__( + command: str, + cwd: Optional[str] = None, + env: Optional[Dict[str, str]] = None, + timeout: int = 30, + on_stdout: Optional[Callable[[str], None]] = None, + on_stderr: Optional[Callable[[str], None]] = None) -> CommandResult +``` + +Execute a command in a shell synchronously. Supports streaming output via callbacks. + +**Arguments**: + +- `command` - Command to execute as a string (e.g., "python -c 'print(2+2)'") +- `cwd` - Working directory for the command +- `env` - Environment variables for the command +- `timeout` - Command timeout in seconds +- `on_stdout` - Optional callback for streaming stdout chunks +- `on_stderr` - Optional callback for streaming stderr chunks + + +**Returns**: + +- `CommandResult` - Result of the command execution + + +**Example**: + + ```python + # Synchronous execution + result = sandbox.exec("echo hello") + + # With streaming callbacks + result = sandbox.exec( + "echo hello; sleep 1; echo world", + on_stdout=lambda data: print(f"OUT: {data}"), + on_stderr=lambda data: print(f"ERR: {data}"), + ) + ``` + + + +## AsyncSandboxExecutor Objects + +```python +class AsyncSandboxExecutor(SandboxExecutor) +``` + +Async command execution interface for Koyeb Sandbox instances. +Bound to a specific sandbox instance. + +Inherits from SandboxExecutor and provides async command execution. + + + +#### \_\_call\_\_ + +```python +async def __call__( + command: str, + cwd: Optional[str] = None, + env: Optional[Dict[str, str]] = None, + timeout: int = 30, + on_stdout: Optional[Callable[[str], None]] = None, + on_stderr: Optional[Callable[[str], None]] = None) -> CommandResult +``` + +Execute a command in a shell asynchronously. 
Supports streaming output via callbacks. + +**Arguments**: + +- `command` - Command to execute as a string (e.g., "python -c 'print(2+2)'") +- `cwd` - Working directory for the command +- `env` - Environment variables for the command +- `timeout` - Command timeout in seconds +- `on_stdout` - Optional callback for streaming stdout chunks +- `on_stderr` - Optional callback for streaming stderr chunks + + +**Returns**: + +- `CommandResult` - Result of the command execution + + +**Example**: + + ```python + # Async execution + result = await sandbox.exec("echo hello") + + # With streaming callbacks + result = await sandbox.exec( + "echo hello; sleep 1; echo world", + on_stdout=lambda data: print(f"OUT: {data}"), + on_stderr=lambda data: print(f"ERR: {data}"), + ) + ``` + + + +# koyeb/sandbox.filesystem + +Filesystem operations for Koyeb Sandbox instances +Using only the primitives available in the Koyeb API + + + +## SandboxFilesystemError Objects + +```python +class SandboxFilesystemError(SandboxError) +``` + +Base exception for filesystem operations + + + +## FileNotFoundError Objects + +```python +class FileNotFoundError(SandboxFilesystemError) +``` + +Raised when file or directory not found + + + +## FileExistsError Objects + +```python +class FileExistsError(SandboxFilesystemError) +``` + +Raised when file already exists + + + +## FileInfo Objects + +```python +@dataclass +class FileInfo() +``` + +File information + + + +## SandboxFilesystem Objects + +```python +class SandboxFilesystem() +``` + +Synchronous filesystem operations for Koyeb Sandbox instances. +Using only the primitives available in the Koyeb API. + +For async usage, use AsyncSandboxFilesystem instead. + + + +#### write\_file + +```python +def write_file(path: str, + content: Union[str, bytes], + encoding: str = "utf-8") -> None +``` + +Write content to a file synchronously. 
+ +**Arguments**: + +- `path` - Absolute path to the file +- `content` - Content to write (string or bytes) +- `encoding` - File encoding (default: "utf-8"). Use "base64" for binary data. + + + +#### read\_file + +```python +def read_file(path: str, encoding: str = "utf-8") -> FileInfo +``` + +Read a file from the sandbox synchronously. + +**Arguments**: + +- `path` - Absolute path to the file +- `encoding` - File encoding (default: "utf-8"). Use "base64" for binary data. + + +**Returns**: + +- `FileInfo` - Object with content and encoding + + + +#### mkdir + +```python +def mkdir(path: str, recursive: bool = False) -> None +``` + +Create a directory synchronously. + +**Arguments**: + +- `path` - Absolute path to the directory +- `recursive` - Create parent directories if needed (default: False) + + + +#### list\_dir + +```python +def list_dir(path: str = ".") -> List[str] +``` + +List contents of a directory synchronously. + +**Arguments**: + +- `path` - Path to the directory (default: current directory) + + +**Returns**: + +- `List[str]` - Names of files and directories within the specified path. + + + +#### delete\_file + +```python +def delete_file(path: str) -> None +``` + +Delete a file synchronously. + +**Arguments**: + +- `path` - Absolute path to the file + + + +#### delete\_dir + +```python +def delete_dir(path: str) -> None +``` + +Delete a directory synchronously. + +**Arguments**: + +- `path` - Absolute path to the directory + + + +#### rename\_file + +```python +def rename_file(old_path: str, new_path: str) -> None +``` + +Rename a file synchronously. + +**Arguments**: + +- `old_path` - Current file path +- `new_path` - New file path + + + +#### move\_file + +```python +def move_file(source_path: str, destination_path: str) -> None +``` + +Move a file to a different directory synchronously. 
+ +**Arguments**: + +- `source_path` - Current file path +- `destination_path` - Destination path + + + +#### write\_files + +```python +def write_files(files: List[Dict[str, str]]) -> None +``` + +Write multiple files in a single operation synchronously. + +**Arguments**: + +- `files` - List of dictionaries, each with 'path', 'content', and optional 'encoding'. + + + +#### exists + +```python +def exists(path: str) -> bool +``` + +Check if file/directory exists synchronously + + + +#### is\_file + +```python +def is_file(path: str) -> bool +``` + +Check if path is a file synchronously + + + +#### is\_dir + +```python +def is_dir(path: str) -> bool +``` + +Check if path is a directory synchronously + + + +#### upload\_file + +```python +def upload_file(local_path: str, remote_path: str) -> None +``` + +Upload a local file to the sandbox synchronously. + +**Arguments**: + +- `local_path` - Path to the local file +- `remote_path` - Destination path in the sandbox + + + +#### download\_file + +```python +def download_file(remote_path: str, local_path: str) -> None +``` + +Download a file from the sandbox to a local path synchronously. + +**Arguments**: + +- `remote_path` - Path to the file in the sandbox +- `local_path` - Destination path on the local filesystem + + + +#### ls + +```python +def ls(path: str = ".") -> List[str] +``` + +List directory contents synchronously. + +**Arguments**: + +- `path` - Path to list + + +**Returns**: + + List of file/directory names + + + +#### rm + +```python +def rm(path: str, recursive: bool = False) -> None +``` + +Remove file or directory synchronously. + +**Arguments**: + +- `path` - Path to remove +- `recursive` - Remove recursively + + + +#### open + +```python +def open(path: str, mode: str = "r") -> "SandboxFileIO" +``` + +Open a file in the sandbox synchronously. + +**Arguments**: + +- `path` - Path to the file +- `mode` - Open mode ('r', 'w', 'a', etc.) 
+ + +**Returns**: + +- `SandboxFileIO` - File handle + + + +## AsyncSandboxFilesystem Objects + +```python +class AsyncSandboxFilesystem(SandboxFilesystem) +``` + +Async filesystem operations for Koyeb Sandbox instances. +Inherits from SandboxFilesystem and provides async methods. + + + +#### write\_file + +```python +async def write_file(path: str, + content: Union[str, bytes], + encoding: str = "utf-8") -> None +``` + +Write content to a file asynchronously. + +**Arguments**: + +- `path` - Absolute path to the file +- `content` - Content to write (string or bytes) +- `encoding` - File encoding (default: "utf-8"). Use "base64" for binary data. + + + +#### read\_file + +```python +async def read_file(path: str, encoding: str = "utf-8") -> FileInfo +``` + +Read a file from the sandbox asynchronously. + +**Arguments**: + +- `path` - Absolute path to the file +- `encoding` - File encoding (default: "utf-8"). Use "base64" for binary data. + + +**Returns**: + +- `FileInfo` - Object with content and encoding + + + +#### mkdir + +```python +async def mkdir(path: str, recursive: bool = False) -> None +``` + +Create a directory asynchronously. + +**Arguments**: + +- `path` - Absolute path to the directory +- `recursive` - Create parent directories if needed (default: False) + + + +#### list\_dir + +```python +async def list_dir(path: str = ".") -> List[str] +``` + +List contents of a directory asynchronously. + +**Arguments**: + +- `path` - Path to the directory (default: current directory) + + +**Returns**: + +- `List[str]` - Names of files and directories within the specified path. + + + +#### delete\_file + +```python +async def delete_file(path: str) -> None +``` + +Delete a file asynchronously. + +**Arguments**: + +- `path` - Absolute path to the file + + + +#### delete\_dir + +```python +async def delete_dir(path: str) -> None +``` + +Delete a directory asynchronously. 
+ +**Arguments**: + +- `path` - Absolute path to the directory + + + +#### rename\_file + +```python +async def rename_file(old_path: str, new_path: str) -> None +``` + +Rename a file asynchronously. + +**Arguments**: + +- `old_path` - Current file path +- `new_path` - New file path + + + +#### move\_file + +```python +async def move_file(source_path: str, destination_path: str) -> None +``` + +Move a file to a different directory asynchronously. + +**Arguments**: + +- `source_path` - Current file path +- `destination_path` - Destination path + + + +#### write\_files + +```python +async def write_files(files: List[Dict[str, str]]) -> None +``` + +Write multiple files in a single operation asynchronously. + +**Arguments**: + +- `files` - List of dictionaries, each with 'path', 'content', and optional 'encoding'. + + + +#### exists + +```python +async def exists(path: str) -> bool +``` + +Check if file/directory exists asynchronously + + + +#### is\_file + +```python +async def is_file(path: str) -> bool +``` + +Check if path is a file asynchronously + + + +#### is\_dir + +```python +async def is_dir(path: str) -> bool +``` + +Check if path is a directory asynchronously + + + +#### upload\_file + +```python +async def upload_file(local_path: str, remote_path: str) -> None +``` + +Upload a local file to the sandbox asynchronously. + +**Arguments**: + +- `local_path` - Path to the local file +- `remote_path` - Destination path in the sandbox + + + +#### download\_file + +```python +async def download_file(remote_path: str, local_path: str) -> None +``` + +Download a file from the sandbox to a local path asynchronously. + +**Arguments**: + +- `remote_path` - Path to the file in the sandbox +- `local_path` - Destination path on the local filesystem + + + +#### ls + +```python +async def ls(path: str = ".") -> List[str] +``` + +List directory contents asynchronously. 
+ +**Arguments**: + +- `path` - Path to list + + +**Returns**: + + List of file/directory names + + + +#### rm + +```python +async def rm(path: str, recursive: bool = False) -> None +``` + +Remove file or directory asynchronously. + +**Arguments**: + +- `path` - Path to remove +- `recursive` - Remove recursively + + + +#### open + +```python +def open(path: str, mode: str = "r") -> "AsyncSandboxFileIO" +``` + +Open a file in the sandbox asynchronously. + +**Arguments**: + +- `path` - Path to the file +- `mode` - Open mode ('r', 'w', 'a', etc.) + + +**Returns**: + +- `AsyncSandboxFileIO` - Async file handle + + + +## SandboxFileIO Objects + +```python +class SandboxFileIO() +``` + +Synchronous file I/O handle for sandbox files + + + +#### read + +```python +def read() -> str +``` + +Read file content synchronously + + + +#### write + +```python +def write(content: str) -> None +``` + +Write content to file synchronously + + + +#### close + +```python +def close() -> None +``` + +Close the file + + + +## AsyncSandboxFileIO Objects + +```python +class AsyncSandboxFileIO() +``` + +Async file I/O handle for sandbox files + + + +#### read + +```python +async def read() -> str +``` + +Read file content asynchronously + + + +#### write + +```python +async def write(content: str) -> None +``` + +Write content to file asynchronously + + + +#### close + +```python +def close() -> None +``` + +Close the file + + + +# koyeb/sandbox.sandbox + +Koyeb Sandbox - Python SDK for creating and managing Koyeb sandboxes + + + +## Sandbox Objects + +```python +class Sandbox() +``` + +Synchronous sandbox for running code on Koyeb infrastructure. +Provides creation and deletion functionality with proper health polling. 
+ + + +#### create + +```python +@classmethod +def create(cls, + image: str = "docker.io/library/ubuntu:latest", + name: str = "quick-sandbox", + wait_ready: bool = True, + instance_type: str = "nano", + ports: Optional[List[DeploymentPort]] = None, + env: Optional[Dict[str, str]] = None, + regions: Optional[List[str]] = None, + api_token: Optional[str] = None, + timeout: int = 300) -> "Sandbox" +``` + +Create a new sandbox instance. + +**Arguments**: + +- `image` - Docker image to use (default: ubuntu:latest) +- `name` - Name of the sandbox +- `wait_ready` - Wait for sandbox to be ready (default: True) +- `instance_type` - Instance type (default: nano) +- `ports` - List of ports to expose +- `env` - Environment variables +- `regions` - List of regions to deploy to (default: ["na"]) +- `api_token` - Koyeb API token (if None, will try to get from KOYEB_API_TOKEN env var) +- `timeout` - Timeout for sandbox creation in seconds + + +**Returns**: + +- `Sandbox` - A new Sandbox instance + + + +#### wait\_ready + +```python +def wait_ready(timeout: int = 60, poll_interval: float = 2.0) -> bool +``` + +Wait for sandbox to become ready with proper polling. + +**Arguments**: + +- `timeout` - Maximum time to wait in seconds +- `poll_interval` - Time between health checks in seconds + + +**Returns**: + +- `bool` - True if sandbox became ready, False if timeout + + + +#### delete + +```python +def delete() -> None +``` + +Delete the sandbox instance. 
+ + + +#### status + +```python +def status() -> str +``` + +Get current sandbox status + + + +#### is\_healthy + +```python +def is_healthy() -> bool +``` + +Check if sandbox is healthy and ready for operations + + + +#### filesystem + +```python +@property +def filesystem() +``` + +Get filesystem operations interface + + + +#### exec + +```python +@property +def exec() +``` + +Get command execution interface + + + +## AsyncSandbox Objects + +```python +class AsyncSandbox(Sandbox) +``` + +Async sandbox for running code on Koyeb infrastructure. +Inherits from Sandbox and provides async wrappers for all operations. + + + +#### create + +```python +@classmethod +async def create(cls, + image: str = "docker.io/library/ubuntu:latest", + name: str = "quick-sandbox", + wait_ready: bool = True, + instance_type: str = "nano", + ports: Optional[List[DeploymentPort]] = None, + env: Optional[Dict[str, str]] = None, + regions: Optional[List[str]] = None, + api_token: Optional[str] = None, + timeout: int = 300) -> "AsyncSandbox" +``` + +Create a new sandbox instance with async support. + +**Arguments**: + +- `image` - Docker image to use (default: ubuntu:latest) +- `name` - Name of the sandbox +- `wait_ready` - Wait for sandbox to be ready (default: True) +- `instance_type` - Instance type (default: nano) +- `ports` - List of ports to expose +- `env` - Environment variables +- `regions` - List of regions to deploy to (default: ["na"]) +- `api_token` - Koyeb API token (if None, will try to get from KOYEB_API_TOKEN env var) +- `timeout` - Timeout for sandbox creation in seconds + + +**Returns**: + +- `AsyncSandbox` - A new AsyncSandbox instance + + + +#### wait\_ready + +```python +async def wait_ready(timeout: int = 60, poll_interval: float = 2.0) -> bool +``` + +Wait for sandbox to become ready with proper async polling. 
+ +**Arguments**: + +- `timeout` - Maximum time to wait in seconds +- `poll_interval` - Time between health checks in seconds + + +**Returns**: + +- `bool` - True if sandbox became ready, False if timeout + + + +#### delete + +```python +async def delete() -> None +``` + +Delete the sandbox instance asynchronously. + + + +#### status + +```python +async def status() -> str +``` + +Get current sandbox status asynchronously + + + +#### is\_healthy + +```python +async def is_healthy() -> bool +``` + +Check if sandbox is healthy and ready for operations asynchronously + + + +#### exec + +```python +@property +def exec() +``` + +Get async command execution interface + + + +#### filesystem + +```python +@property +def filesystem() +``` + +Get filesystem operations interface + + + +# koyeb/sandbox.utils + +Utility functions for Koyeb Sandbox + + + +#### get\_api\_client + +```python +def get_api_client( + api_token: Optional[str] = None, + host: Optional[str] = None +) -> tuple[AppsApi, ServicesApi, InstancesApi] +``` + +Get configured API clients for Koyeb operations. + +**Arguments**: + +- `api_token` - Koyeb API token. If not provided, will try to get from KOYEB_API_TOKEN env var +- `host` - Koyeb API host URL. If not provided, will try to get from KOYEB_API_HOST env var (defaults to https://app.koyeb.com) + + +**Returns**: + + Tuple of (AppsApi, ServicesApi, InstancesApi) instances + + +**Raises**: + +- `ValueError` - If API token is not provided + + + +#### build\_env\_vars + +```python +def build_env_vars(env: Optional[Dict[str, str]]) -> List[DeploymentEnv] +``` + +Build environment variables list from dictionary. + +**Arguments**: + +- `env` - Dictionary of environment variables + + +**Returns**: + + List of DeploymentEnv objects + + + +#### create\_docker\_source + +```python +def create_docker_source(image: str, command_args: List[str]) -> DockerSource +``` + +Create Docker source configuration. 
+ +**Arguments**: + +- `image` - Docker image name +- `command_args` - Command and arguments to run + + +**Returns**: + + DockerSource object + + + +#### create\_deployment\_definition + +```python +def create_deployment_definition( + name: str, + docker_source: DockerSource, + env_vars: List[DeploymentEnv], + instance_type: str, + ports: Optional[List[DeploymentPort]] = None, + regions: List[str] = None) -> DeploymentDefinition +``` + +Create deployment definition for a sandbox service. + +**Arguments**: + +- `name` - Service name +- `docker_source` - Docker configuration +- `env_vars` - Environment variables +- `instance_type` - Instance type +- `ports` - List of ports (if provided, type becomes WEB, otherwise WORKER) +- `regions` - List of regions (defaults to North America) + + +**Returns**: + + DeploymentDefinition object + + + +#### get\_sandbox\_status + +```python +def get_sandbox_status(instance_id: str, + api_token: Optional[str] = None) -> InstanceStatus +``` + +Get the current status of a sandbox instance. + + + +#### is\_sandbox\_healthy + +```python +def is_sandbox_healthy(instance_id: str, + api_token: Optional[str] = None) -> bool +``` + +Check if sandbox is healthy and ready for operations. + + + +#### ensure\_sandbox\_healthy + +```python +def ensure_sandbox_healthy(instance_id: str, + api_token: Optional[str] = None) -> None +``` + +Ensure a sandbox instance is healthy, raising an exception if not. 
+ + + +## SandboxError Objects + +```python +class SandboxError(Exception) +``` + +Base exception for sandbox operations + diff --git a/scripts/generate_docs.sh b/scripts/generate_docs.sh index 93f91719..edf1a23b 100755 --- a/scripts/generate_docs.sh +++ b/scripts/generate_docs.sh @@ -1 +1,2 @@ uv run pydoc-markdown -p koyeb/api >docs/api.md +uv run pydoc-markdown -p koyeb/sandbox >docs/sandbox.md From edca77d078fcba663b237b1b67f4bb946cdbc62c Mon Sep 17 00:00:00 2001 From: Bastien Chatelard Date: Tue, 4 Nov 2025 12:26:03 +0100 Subject: [PATCH 16/47] Update default base image --- examples/01_create_sandbox.py | 2 +- examples/01_create_sandbox_async.py | 2 +- examples/02_create_sandbox_with_timing.py | 2 +- .../02_create_sandbox_with_timing_async.py | 2 +- examples/03_basic_commands.py | 2 +- examples/03_basic_commands_async.py | 2 +- examples/04_streaming_output.py | 2 +- examples/04_streaming_output_async.py | 2 +- examples/05_environment_variables.py | 2 +- examples/05_environment_variables_async.py | 2 +- examples/06_working_directory.py | 2 +- examples/06_working_directory_async.py | 2 +- examples/07_file_operations.py | 2 +- examples/07_file_operations_async.py | 2 +- examples/08_directory_operations.py | 2 +- examples/08_directory_operations_async.py | 2 +- examples/09_binary_files.py | 2 +- examples/09_binary_files_async.py | 2 +- examples/10_batch_operations.py | 2 +- examples/10_batch_operations_async.py | 2 +- examples/11_upload_download.py | 2 +- examples/11_upload_download_async.py | 2 +- examples/12_file_manipulation.py | 2 +- examples/12_file_manipulation_async.py | 2 +- examples/README.md | 2 +- koyeb/sandbox/exec.py | 11 +++- koyeb/sandbox/filesystem.py | 13 +++++ koyeb/sandbox/sandbox.py | 33 +++++++++--- koyeb/sandbox/utils.py | 52 ++++++++++++++++++- 29 files changed, 126 insertions(+), 33 deletions(-) diff --git a/examples/01_create_sandbox.py b/examples/01_create_sandbox.py index 2c33fced..386a1a5b 100644 --- a/examples/01_create_sandbox.py +++ 
b/examples/01_create_sandbox.py @@ -15,7 +15,7 @@ def main(): sandbox = None try: sandbox = Sandbox.create( - image="python:3.11", + image="koyeb/sandbox", name="example-sandbox", wait_ready=True, api_token=api_token, diff --git a/examples/01_create_sandbox_async.py b/examples/01_create_sandbox_async.py index 5563b780..33dfe19c 100644 --- a/examples/01_create_sandbox_async.py +++ b/examples/01_create_sandbox_async.py @@ -16,7 +16,7 @@ async def main(): sandbox = None try: sandbox = await AsyncSandbox.create( - image="python:3.11", + image="koyeb/sandbox", name="example-sandbox", wait_ready=True, api_token=api_token, diff --git a/examples/02_create_sandbox_with_timing.py b/examples/02_create_sandbox_with_timing.py index d8832ede..1cca958d 100644 --- a/examples/02_create_sandbox_with_timing.py +++ b/examples/02_create_sandbox_with_timing.py @@ -80,7 +80,7 @@ def main(run_long_tests=False): print(" → Creating sandbox...") create_start = time.time() sandbox = Sandbox.create( - image="python:3.11", + image="koyeb/sandbox", name="example-sandbox-timed", wait_ready=True, api_token=api_token, diff --git a/examples/02_create_sandbox_with_timing_async.py b/examples/02_create_sandbox_with_timing_async.py index 2abe37c9..fd16df1a 100644 --- a/examples/02_create_sandbox_with_timing_async.py +++ b/examples/02_create_sandbox_with_timing_async.py @@ -81,7 +81,7 @@ async def main(run_long_tests=False): print(" → Creating sandbox...") create_start = time.time() sandbox = await AsyncSandbox.create( - image="python:3.11", + image="koyeb/sandbox", name="example-sandbox-timed", wait_ready=True, api_token=api_token, diff --git a/examples/03_basic_commands.py b/examples/03_basic_commands.py index 65e70391..6305b44a 100644 --- a/examples/03_basic_commands.py +++ b/examples/03_basic_commands.py @@ -15,7 +15,7 @@ def main(): sandbox = None try: sandbox = Sandbox.create( - image="python:3.11", + image="koyeb/sandbox", name="basic-commands", wait_ready=True, api_token=api_token, diff --git 
a/examples/03_basic_commands_async.py b/examples/03_basic_commands_async.py index 3bb198dc..92c75749 100644 --- a/examples/03_basic_commands_async.py +++ b/examples/03_basic_commands_async.py @@ -16,7 +16,7 @@ async def main(): sandbox = None try: sandbox = await AsyncSandbox.create( - image="python:3.11", + image="koyeb/sandbox", name="basic-commands", wait_ready=True, api_token=api_token, diff --git a/examples/04_streaming_output.py b/examples/04_streaming_output.py index 868763b3..9ce5e2d2 100644 --- a/examples/04_streaming_output.py +++ b/examples/04_streaming_output.py @@ -15,7 +15,7 @@ def main(): sandbox = None try: sandbox = Sandbox.create( - image="python:3.11", + image="koyeb/sandbox", name="streaming", wait_ready=True, api_token=api_token, diff --git a/examples/04_streaming_output_async.py b/examples/04_streaming_output_async.py index 797f6de3..98b0270c 100644 --- a/examples/04_streaming_output_async.py +++ b/examples/04_streaming_output_async.py @@ -16,7 +16,7 @@ async def main(): sandbox = None try: sandbox = await AsyncSandbox.create( - image="python:3.11", + image="koyeb/sandbox", name="streaming", wait_ready=True, api_token=api_token, diff --git a/examples/05_environment_variables.py b/examples/05_environment_variables.py index 36963d00..cfb1c8a8 100644 --- a/examples/05_environment_variables.py +++ b/examples/05_environment_variables.py @@ -15,7 +15,7 @@ def main(): sandbox = None try: sandbox = Sandbox.create( - image="python:3.11", + image="koyeb/sandbox", name="env-vars", wait_ready=True, api_token=api_token, diff --git a/examples/05_environment_variables_async.py b/examples/05_environment_variables_async.py index e46cd7ca..a91cd1af 100644 --- a/examples/05_environment_variables_async.py +++ b/examples/05_environment_variables_async.py @@ -16,7 +16,7 @@ async def main(): sandbox = None try: sandbox = await AsyncSandbox.create( - image="python:3.11", + image="koyeb/sandbox", name="env-vars", wait_ready=True, api_token=api_token, diff --git 
a/examples/06_working_directory.py b/examples/06_working_directory.py index 30c3583d..ae190d54 100644 --- a/examples/06_working_directory.py +++ b/examples/06_working_directory.py @@ -15,7 +15,7 @@ def main(): sandbox = None try: sandbox = Sandbox.create( - image="python:3.11", + image="koyeb/sandbox", name="working-dir", wait_ready=True, api_token=api_token, diff --git a/examples/06_working_directory_async.py b/examples/06_working_directory_async.py index 5222c858..967d9329 100644 --- a/examples/06_working_directory_async.py +++ b/examples/06_working_directory_async.py @@ -16,7 +16,7 @@ async def main(): sandbox = None try: sandbox = await AsyncSandbox.create( - image="python:3.11", + image="koyeb/sandbox", name="working-dir", wait_ready=True, api_token=api_token, diff --git a/examples/07_file_operations.py b/examples/07_file_operations.py index 3dffceae..2593a465 100644 --- a/examples/07_file_operations.py +++ b/examples/07_file_operations.py @@ -15,7 +15,7 @@ def main(): sandbox = None try: sandbox = Sandbox.create( - image="python:3.11", + image="koyeb/sandbox", name="file-ops", wait_ready=True, api_token=api_token, diff --git a/examples/07_file_operations_async.py b/examples/07_file_operations_async.py index 93cb6c1f..2b683ce1 100644 --- a/examples/07_file_operations_async.py +++ b/examples/07_file_operations_async.py @@ -16,7 +16,7 @@ async def main(): sandbox = None try: sandbox = await AsyncSandbox.create( - image="python:3.11", + image="koyeb/sandbox", name="file-ops", wait_ready=True, api_token=api_token, diff --git a/examples/08_directory_operations.py b/examples/08_directory_operations.py index 38da4ddf..e9c7b589 100644 --- a/examples/08_directory_operations.py +++ b/examples/08_directory_operations.py @@ -15,7 +15,7 @@ def main(): sandbox = None try: sandbox = Sandbox.create( - image="python:3.11", + image="koyeb/sandbox", name="directory-ops", wait_ready=True, api_token=api_token, diff --git a/examples/08_directory_operations_async.py 
b/examples/08_directory_operations_async.py index 9f05cd8e..fd646b69 100644 --- a/examples/08_directory_operations_async.py +++ b/examples/08_directory_operations_async.py @@ -16,7 +16,7 @@ async def main(): sandbox = None try: sandbox = await AsyncSandbox.create( - image="python:3.11", + image="koyeb/sandbox", name="directory-ops", wait_ready=True, api_token=api_token, diff --git a/examples/09_binary_files.py b/examples/09_binary_files.py index 06d97107..fa83f0e6 100644 --- a/examples/09_binary_files.py +++ b/examples/09_binary_files.py @@ -16,7 +16,7 @@ def main(): sandbox = None try: sandbox = Sandbox.create( - image="python:3.11", + image="koyeb/sandbox", name="binary-files", wait_ready=True, api_token=api_token, diff --git a/examples/09_binary_files_async.py b/examples/09_binary_files_async.py index 495eceab..2de90ae5 100644 --- a/examples/09_binary_files_async.py +++ b/examples/09_binary_files_async.py @@ -17,7 +17,7 @@ async def main(): sandbox = None try: sandbox = await AsyncSandbox.create( - image="python:3.11", + image="koyeb/sandbox", name="binary-files", wait_ready=True, api_token=api_token, diff --git a/examples/10_batch_operations.py b/examples/10_batch_operations.py index c6d4c41d..c987e0c0 100644 --- a/examples/10_batch_operations.py +++ b/examples/10_batch_operations.py @@ -15,7 +15,7 @@ def main(): sandbox = None try: sandbox = Sandbox.create( - image="python:3.11", + image="koyeb/sandbox", name="batch-ops", wait_ready=True, api_token=api_token, diff --git a/examples/10_batch_operations_async.py b/examples/10_batch_operations_async.py index 51ed7e12..e0b26176 100644 --- a/examples/10_batch_operations_async.py +++ b/examples/10_batch_operations_async.py @@ -16,7 +16,7 @@ async def main(): sandbox = None try: sandbox = await AsyncSandbox.create( - image="python:3.11", + image="koyeb/sandbox", name="batch-ops", wait_ready=True, api_token=api_token, diff --git a/examples/11_upload_download.py b/examples/11_upload_download.py index beabbd50..e0703d49 
100644 --- a/examples/11_upload_download.py +++ b/examples/11_upload_download.py @@ -16,7 +16,7 @@ def main(): sandbox = None try: sandbox = Sandbox.create( - image="python:3.11", + image="koyeb/sandbox", name="upload-download", wait_ready=True, api_token=api_token, diff --git a/examples/11_upload_download_async.py b/examples/11_upload_download_async.py index f32fcbd3..cf159a69 100644 --- a/examples/11_upload_download_async.py +++ b/examples/11_upload_download_async.py @@ -17,7 +17,7 @@ async def main(): sandbox = None try: sandbox = await AsyncSandbox.create( - image="python:3.11", + image="koyeb/sandbox", name="upload-download", wait_ready=True, api_token=api_token, diff --git a/examples/12_file_manipulation.py b/examples/12_file_manipulation.py index 4f3db43f..6268706d 100644 --- a/examples/12_file_manipulation.py +++ b/examples/12_file_manipulation.py @@ -15,7 +15,7 @@ def main(): sandbox = None try: sandbox = Sandbox.create( - image="python:3.11", + image="koyeb/sandbox", name="file-manip", wait_ready=True, api_token=api_token, diff --git a/examples/12_file_manipulation_async.py b/examples/12_file_manipulation_async.py index 5afb8abe..47871fa6 100644 --- a/examples/12_file_manipulation_async.py +++ b/examples/12_file_manipulation_async.py @@ -16,7 +16,7 @@ async def main(): sandbox = None try: sandbox = await AsyncSandbox.create( - image="python:3.11", + image="koyeb/sandbox", name="file-manip", wait_ready=True, api_token=api_token, diff --git a/examples/README.md b/examples/README.md index a4067b5a..8be5a4a6 100644 --- a/examples/README.md +++ b/examples/README.md @@ -33,7 +33,7 @@ from koyeb import Sandbox # Create a sandbox sandbox = await Sandbox.create( - image="python:3.11", + image="koyeb/sandbox", name="my-sandbox", wait_ready=True, api_token=api_token, diff --git a/koyeb/sandbox/exec.py b/koyeb/sandbox/exec.py index fc77ce82..e28a8ad9 100644 --- a/koyeb/sandbox/exec.py +++ b/koyeb/sandbox/exec.py @@ -117,6 +117,7 @@ def __call__( env=env, 
timeout=timeout, api_token=self.sandbox.api_token, + sandbox_secret=self.sandbox.sandbox_secret, on_stdout=on_stdout, on_stderr=on_stderr, ) @@ -174,6 +175,7 @@ async def __call__( env=env, timeout=timeout, api_token=self.sandbox.api_token, + sandbox_secret=self.sandbox.sandbox_secret, on_stdout=on_stdout, on_stderr=on_stderr, ) @@ -445,6 +447,7 @@ async def _exec_async( env: Optional[Dict[str, str]] = None, timeout: int = 30, api_token: Optional[str] = None, + sandbox_secret: Optional[str] = None, on_stdout: Optional[Callable[[str], None]] = None, on_stderr: Optional[Callable[[str], None]] = None, ) -> CommandResult: @@ -457,7 +460,13 @@ async def _exec_async( Supports streaming output via on_stdout/on_stderr callbacks. """ full_cmd = _normalize_command(command, *args) - shell_command = _build_shell_command(full_cmd, cwd, env) + + # Merge sandbox_secret into environment if provided + exec_env = env.copy() if env else {} + if sandbox_secret: + exec_env["SANDBOX_SECRET"] = sandbox_secret + + shell_command = _build_shell_command(full_cmd, cwd, exec_env) return await _execute_websocket_command( instance_id=instance_id, diff --git a/koyeb/sandbox/filesystem.py b/koyeb/sandbox/filesystem.py index ecb10cdc..d842cb26 100644 --- a/koyeb/sandbox/filesystem.py +++ b/koyeb/sandbox/filesystem.py @@ -84,6 +84,7 @@ async def _write_file_async( instance_id=self.sandbox.instance_id, command=f"printf '%s' {escaped_b64} | base64 -d > {escaped_path}", api_token=self.sandbox.api_token, + sandbox_secret=self.sandbox.sandbox_secret, ) if not result.success: @@ -115,12 +116,14 @@ async def _read_file_async(self, path: str, encoding: str = "utf-8") -> FileInfo instance_id=self.sandbox.instance_id, command=f"base64 < {escaped_path}", api_token=self.sandbox.api_token, + sandbox_secret=self.sandbox.sandbox_secret, ) else: result = await _exec_async( instance_id=self.sandbox.instance_id, command=f"cat {escaped_path}", api_token=self.sandbox.api_token, + 
sandbox_secret=self.sandbox.sandbox_secret, ) if not result.success: @@ -151,12 +154,14 @@ async def _mkdir_async(self, path: str, recursive: bool = False) -> None: instance_id=self.sandbox.instance_id, command=["mkdir", "-p", path], api_token=self.sandbox.api_token, + sandbox_secret=self.sandbox.sandbox_secret, ) else: result = await _exec_async( instance_id=self.sandbox.instance_id, command=["mkdir", path], api_token=self.sandbox.api_token, + sandbox_secret=self.sandbox.sandbox_secret, ) if not result.success: @@ -186,6 +191,7 @@ async def _list_dir_async(self, path: str = ".") -> List[str]: instance_id=self.sandbox.instance_id, command=["ls", "-A", path], api_token=self.sandbox.api_token, + sandbox_secret=self.sandbox.sandbox_secret, ) if not result.success: @@ -214,6 +220,7 @@ async def _delete_file_async(self, path: str) -> None: instance_id=self.sandbox.instance_id, command=["rm", path], api_token=self.sandbox.api_token, + sandbox_secret=self.sandbox.sandbox_secret, ) if not result.success: @@ -240,6 +247,7 @@ async def _delete_dir_async(self, path: str) -> None: instance_id=self.sandbox.instance_id, command=["rmdir", path], api_token=self.sandbox.api_token, + sandbox_secret=self.sandbox.sandbox_secret, ) if not result.success: @@ -269,6 +277,7 @@ async def _rename_file_async(self, old_path: str, new_path: str) -> None: instance_id=self.sandbox.instance_id, command=["mv", old_path, new_path], api_token=self.sandbox.api_token, + sandbox_secret=self.sandbox.sandbox_secret, ) if not result.success: @@ -296,6 +305,7 @@ async def _move_file_async(self, source_path: str, destination_path: str) -> Non instance_id=self.sandbox.instance_id, command=["mv", source_path, destination_path], api_token=self.sandbox.api_token, + sandbox_secret=self.sandbox.sandbox_secret, ) if not result.success: @@ -329,6 +339,7 @@ async def _exists_async(self, path: str) -> bool: instance_id=self.sandbox.instance_id, command=["test", "-e", path], api_token=self.sandbox.api_token, + 
sandbox_secret=self.sandbox.sandbox_secret, ) return result.success @@ -343,6 +354,7 @@ async def _is_file_async(self, path: str) -> bool: instance_id=self.sandbox.instance_id, command=["test", "-f", path], api_token=self.sandbox.api_token, + sandbox_secret=self.sandbox.sandbox_secret, ) return result.success @@ -357,6 +369,7 @@ async def _is_dir_async(self, path: str) -> bool: instance_id=self.sandbox.instance_id, command=["test", "-d", path], api_token=self.sandbox.api_token, + sandbox_secret=self.sandbox.sandbox_secret, ) return result.success diff --git a/koyeb/sandbox/sandbox.py b/koyeb/sandbox/sandbox.py index afe639fd..1310431e 100644 --- a/koyeb/sandbox/sandbox.py +++ b/koyeb/sandbox/sandbox.py @@ -5,6 +5,7 @@ """ import asyncio +import secrets import time from typing import Dict, List, Optional @@ -17,6 +18,8 @@ create_docker_source, get_api_client, is_sandbox_healthy, + create_koyeb_sandbox_ports, + create_koyeb_sandbox_routes ) @@ -34,6 +37,7 @@ def __init__( instance_id: str, name: Optional[str] = None, api_token: Optional[str] = None, + sandbox_secret: Optional[str] = None, ): self.sandbox_id = sandbox_id self.app_id = app_id @@ -41,12 +45,13 @@ def __init__( self.instance_id = instance_id self.name = name self.api_token = api_token + self.sandbox_secret = sandbox_secret self._created_at = time.time() @classmethod def create( cls, - image: str = "docker.io/library/ubuntu:latest", + image: str = "koyeb/sandbox", name: str = "quick-sandbox", wait_ready: bool = True, instance_type: str = "nano", @@ -60,7 +65,7 @@ def create( Create a new sandbox instance. 
Args: - image: Docker image to use (default: ubuntu:latest) + image: Docker image to use (default: koyeb/sandbox) name: Name of the sandbox wait_ready: Wait for sandbox to be ready (default: True) instance_type: Instance type (default: nano) @@ -102,7 +107,7 @@ def create( def _create_sync( cls, name: str, - image: str = "docker.io/library/ubuntu:latest", + image: str = "koyeb/sandbox", instance_type: str = "nano", ports: Optional[List[DeploymentPort]] = None, env: Optional[Dict[str, str]] = None, @@ -116,12 +121,25 @@ def _create_sync( """ apps_api, services_api, _ = get_api_client(api_token) + # Auto-configure ports for koyeb/sandbox image if not explicitly provided + if ports is None: + ports = create_koyeb_sandbox_ports() + routes = create_koyeb_sandbox_routes() + + # Generate secure sandbox secret + sandbox_secret = secrets.token_urlsafe(32) + + # Add SANDBOX_SECRET to environment variables + if env is None: + env = {} + env["SANDBOX_SECRET"] = sandbox_secret + app_name = f"sandbox-app-{name}-{int(time.time())}" app_response = apps_api.create_app(app=CreateApp(name=app_name)) app_id = app_response.app.id env_vars = build_env_vars(env) - docker_source = create_docker_source(image, ["sleep", "infinity"]) + docker_source = create_docker_source(image, []) deployment_definition = create_deployment_definition( name=f"sandbox-service-{name}", docker_source=docker_source, @@ -129,6 +147,7 @@ def _create_sync( instance_type=instance_type, ports=ports, regions=regions, + routes=routes, ) from koyeb.api.models.create_service import CreateService @@ -175,6 +194,7 @@ def _create_sync( instance_id=instance_id, name=name, api_token=api_token, + sandbox_secret=sandbox_secret, ) def wait_ready(self, timeout: int = 60, poll_interval: float = 2.0) -> bool: @@ -241,7 +261,7 @@ class AsyncSandbox(Sandbox): @classmethod async def create( cls, - image: str = "docker.io/library/ubuntu:latest", + image: str = "koyeb/sandbox", name: str = "quick-sandbox", wait_ready: bool = True, 
instance_type: str = "nano", @@ -255,7 +275,7 @@ async def create( Create a new sandbox instance with async support. Args: - image: Docker image to use (default: ubuntu:latest) + image: Docker image to use (default: koyeb/sandbox) name: Name of the sandbox wait_ready: Wait for sandbox to be ready (default: True) instance_type: Instance type (default: nano) @@ -300,6 +320,7 @@ async def create( instance_id=sync_result.instance_id, name=sync_result.name, api_token=sync_result.api_token, + sandbox_secret=sync_result.sandbox_secret, ) sandbox._created_at = sync_result._created_at diff --git a/koyeb/sandbox/utils.py b/koyeb/sandbox/utils.py index e0eb7c7c..2d76b2ad 100644 --- a/koyeb/sandbox/utils.py +++ b/koyeb/sandbox/utils.py @@ -15,6 +15,7 @@ from koyeb.api.models.deployment_env import DeploymentEnv from koyeb.api.models.deployment_instance_type import DeploymentInstanceType from koyeb.api.models.deployment_port import DeploymentPort +from koyeb.api.models.deployment_route import DeploymentRoute from koyeb.api.models.deployment_scaling import DeploymentScaling from koyeb.api.models.docker_source import DockerSource from koyeb.api.models.instance_status import InstanceStatus @@ -74,7 +75,7 @@ def create_docker_source(image: str, command_args: List[str]) -> DockerSource: Args: image: Docker image name - command_args: Command and arguments to run + command_args: Command and arguments to run (optional, empty list means use image default) Returns: DockerSource object @@ -86,6 +87,52 @@ def create_docker_source(image: str, command_args: List[str]) -> DockerSource: ) +def create_koyeb_sandbox_ports() -> List[DeploymentPort]: + """ + Create port configuration for koyeb/sandbox image. 
+ + Creates two ports: + - Port 3030 exposed on HTTP, mounted on /koyeb-sandbox/ + - Port 3031 exposed on HTTP, mounted on / + + Returns: + List of DeploymentPort objects configured for koyeb/sandbox + """ + return [ + DeploymentPort( + port=3030, + protocol="http", + ), + DeploymentPort( + port=3031, + protocol="http", + ) + ] + + +def create_koyeb_sandbox_routes() -> List[DeploymentRoute]: + """ + Create route configuration for koyeb/sandbox image to make it publicly accessible. + + Creates two routes: + - Port 3030 accessible at /koyeb-sandbox/ + - Port 3031 accessible at / + + Returns: + List of DeploymentRoute objects configured for koyeb/sandbox + """ + return [ + DeploymentRoute( + port=3030, + path="/koyeb-sandbox/" + ), + DeploymentRoute( + port=3031, + path="/" + ) + ] + + def create_deployment_definition( name: str, docker_source: DockerSource, @@ -93,6 +140,7 @@ def create_deployment_definition( instance_type: str, ports: Optional[List[DeploymentPort]] = None, regions: List[str] = None, + routes: Optional[List[DeploymentRoute]] = None, ) -> DeploymentDefinition: """ Create deployment definition for a sandbox service. 
@@ -104,6 +152,7 @@ def create_deployment_definition( instance_type: Instance type ports: List of ports (if provided, type becomes WEB, otherwise WORKER) regions: List of regions (defaults to North America) + routes: List of routes for public access Returns: DeploymentDefinition object @@ -121,6 +170,7 @@ def create_deployment_definition( docker=docker_source, env=env_vars, ports=ports, + routes=routes, instance_types=[DeploymentInstanceType(type=instance_type)], scalings=[DeploymentScaling(min=1, max=1)], regions=regions, From 7a085faa8c786970fa88daff0d0f2d382183f4de Mon Sep 17 00:00:00 2001 From: Bastien Chatelard Date: Tue, 4 Nov 2025 12:57:19 +0100 Subject: [PATCH 17/47] Update computation for sandbox healthyness --- koyeb/sandbox/executor_client.py | 188 +++++++++++++++++++++++++++++++ koyeb/sandbox/sandbox.py | 22 +++- koyeb/sandbox/utils.py | 86 +++++++++++++- 3 files changed, 292 insertions(+), 4 deletions(-) create mode 100644 koyeb/sandbox/executor_client.py diff --git a/koyeb/sandbox/executor_client.py b/koyeb/sandbox/executor_client.py new file mode 100644 index 00000000..203f2808 --- /dev/null +++ b/koyeb/sandbox/executor_client.py @@ -0,0 +1,188 @@ +""" +Sandbox Executor API Client + +A simple Python client for interacting with the Sandbox Executor API. +""" + +import requests +from typing import Optional, Dict, List, Any + + +class SandboxClient: + """Client for the Sandbox Executor API.""" + + def __init__(self, base_url: str, secret: str): + """ + Initialize the Sandbox Client. + + Args: + base_url: The base URL of the sandbox server (e.g., 'http://localhost:8080') + secret: The authentication secret/token + """ + self.base_url = base_url.rstrip('/') + self.secret = secret + self.headers = { + 'Authorization': f'Bearer {secret}', + 'Content-Type': 'application/json' + } + + def health(self) -> Dict[str, str]: + """ + Check the health status of the server. 
+ + Returns: + Dict with status information + """ + response = requests.get(f'{self.base_url}/health') + response.raise_for_status() + return response.json() + + def run( + self, + cmd: str, + cwd: Optional[str] = None, + env: Optional[Dict[str, str]] = None + ) -> Dict[str, Any]: + """ + Execute a shell command in the sandbox. + + Args: + cmd: The shell command to execute + cwd: Optional working directory for command execution + env: Optional environment variables to set/override + + Returns: + Dict containing stdout, stderr, error (if any), and exit code + """ + payload = {'cmd': cmd} + if cwd is not None: + payload['cwd'] = cwd + if env is not None: + payload['env'] = env + + response = requests.post( + f'{self.base_url}/run', + json=payload, + headers=self.headers + ) + response.raise_for_status() + return response.json() + + def write_file(self, path: str, content: str) -> Dict[str, Any]: + """ + Write content to a file. + + Args: + path: The file path to write to + content: The content to write + + Returns: + Dict with success status and error if any + """ + payload = { + 'path': path, + 'content': content + } + response = requests.post( + f'{self.base_url}/write_file', + json=payload, + headers=self.headers + ) + response.raise_for_status() + return response.json() + + def read_file(self, path: str) -> Dict[str, Any]: + """ + Read content from a file. + + Args: + path: The file path to read from + + Returns: + Dict with file content and error if any + """ + payload = {'path': path} + response = requests.post( + f'{self.base_url}/read_file', + json=payload, + headers=self.headers + ) + response.raise_for_status() + return response.json() + + def delete_file(self, path: str) -> Dict[str, Any]: + """ + Delete a file. 
+ + Args: + path: The file path to delete + + Returns: + Dict with success status and error if any + """ + payload = {'path': path} + response = requests.post( + f'{self.base_url}/delete_file', + json=payload, + headers=self.headers + ) + response.raise_for_status() + return response.json() + + def make_dir(self, path: str) -> Dict[str, Any]: + """ + Create a directory (including parent directories). + + Args: + path: The directory path to create + + Returns: + Dict with success status and error if any + """ + payload = {'path': path} + response = requests.post( + f'{self.base_url}/make_dir', + json=payload, + headers=self.headers + ) + response.raise_for_status() + return response.json() + + def delete_dir(self, path: str) -> Dict[str, Any]: + """ + Recursively delete a directory and all its contents. + + Args: + path: The directory path to delete + + Returns: + Dict with success status and error if any + """ + payload = {'path': path} + response = requests.post( + f'{self.base_url}/delete_dir', + json=payload, + headers=self.headers + ) + response.raise_for_status() + return response.json() + + def list_dir(self, path: str) -> Dict[str, Any]: + """ + List the contents of a directory. 
+ + Args: + path: The directory path to list + + Returns: + Dict with entries list and error if any + """ + payload = {'path': path} + response = requests.post( + f'{self.base_url}/list_dir', + json=payload, + headers=self.headers + ) + response.raise_for_status() + return response.json() + diff --git a/koyeb/sandbox/sandbox.py b/koyeb/sandbox/sandbox.py index 1310431e..9dafaa96 100644 --- a/koyeb/sandbox/sandbox.py +++ b/koyeb/sandbox/sandbox.py @@ -47,6 +47,7 @@ def __init__( self.api_token = api_token self.sandbox_secret = sandbox_secret self._created_at = time.time() + self._sandbox_url = None @classmethod def create( @@ -226,6 +227,19 @@ def delete(self) -> None: services_api.delete_service(self.service_id) apps_api.delete_app(self.app_id) + def get_sandbox_url(self) -> Optional[str]: + """ + Get the public URL of the sandbox. + Caches the URL after first retrieval. + + Returns: + Optional[str]: The sandbox URL or None if unavailable + """ + if self._sandbox_url is None: + from .utils import get_sandbox_url + self._sandbox_url = get_sandbox_url(self.service_id, self.api_token) + return self._sandbox_url + def status(self) -> str: """Get current sandbox status""" from .utils import get_sandbox_status @@ -235,7 +249,13 @@ def status(self) -> str: def is_healthy(self) -> bool: """Check if sandbox is healthy and ready for operations""" - return is_sandbox_healthy(self.instance_id, self.api_token) + sandbox_url = self.get_sandbox_url() + return is_sandbox_healthy( + self.instance_id, + self.api_token, + sandbox_url=sandbox_url, + sandbox_secret=self.sandbox_secret + ) @property def filesystem(self): diff --git a/koyeb/sandbox/utils.py b/koyeb/sandbox/utils.py index 2d76b2ad..e3c84871 100644 --- a/koyeb/sandbox/utils.py +++ b/koyeb/sandbox/utils.py @@ -20,6 +20,8 @@ from koyeb.api.models.docker_source import DockerSource from koyeb.api.models.instance_status import InstanceStatus +from .executor_client import SandboxClient + def get_api_client( api_token: 
Optional[str] = None, host: Optional[str] = None @@ -189,9 +191,87 @@ def get_sandbox_status( return InstanceStatus.ERROR -def is_sandbox_healthy(instance_id: str, api_token: Optional[str] = None) -> bool: - """Check if sandbox is healthy and ready for operations.""" - return get_sandbox_status(instance_id, api_token) == InstanceStatus.HEALTHY +def get_sandbox_url(service_id: str, api_token: Optional[str] = None) -> Optional[str]: + """ + Get the public URL of a sandbox service. + + Returns the URL with /koyeb-sandbox path prepended since the sandbox + executor API is exposed on port 3030 which is mounted at /koyeb-sandbox/. + """ + try: + _, services_api, _ = get_api_client(api_token) + service_response = services_api.get_service(service_id) + + # Get the service app URL (this would be like: app-name-org.koyeb.app) + # The URL is typically constructed from the app name and organization + service = service_response.service + if hasattr(service, 'app_url') and service.app_url: + return f"https://{service.app_url}/koyeb-sandbox" + + # If app_url is not available, we need to get it from the app + if service.app_id: + apps_api, _, _ = get_api_client(api_token) + app_response = apps_api.get_app(service.app_id) + app = app_response.app + if hasattr(app, 'domains') and app.domains: + # Use the first public domain + return f"https://{app.domains[0]}/koyeb-sandbox" + # Fallback: construct from app name + if app.name and service.organization_id: + # This is an approximation - actual URL construction may vary + return f"https://{app.name}.koyeb.app/koyeb-sandbox" + + return None + except (NotFoundException, ApiException, Exception): + return None + + +def is_sandbox_healthy( + instance_id: str, + api_token: Optional[str] = None, + sandbox_url: Optional[str] = None, + sandbox_secret: Optional[str] = None +) -> bool: + """ + Check if sandbox is healthy and ready for operations. + + This function checks both: + 1. The Koyeb instance status (via API) + 2. 
The sandbox executor health endpoint (via SandboxClient, if URL and secret are provided) + + Args: + instance_id: The Koyeb instance ID + api_token: Koyeb API token + sandbox_url: Optional URL of the sandbox executor API + sandbox_secret: Optional secret for sandbox executor authentication + + Returns: + bool: True if sandbox is healthy, False otherwise + """ + # Check Koyeb instance status + instance_healthy = get_sandbox_status(instance_id, api_token) == InstanceStatus.HEALTHY + + # If instance is not healthy, no need to check executor + if not instance_healthy: + return False + + # If sandbox URL and secret are provided, also check executor health + if sandbox_url and sandbox_secret: + try: + client = SandboxClient(sandbox_url, sandbox_secret) + health_response = client.health() + # Check if health response indicates the server is healthy + # The exact response format may vary, but typically has a "status" field + if isinstance(health_response, dict): + status = health_response.get('status', '').lower() + return status in ['ok', 'healthy', 'ready'] + return True # If we got a response, consider it healthy + except Exception: + # If we can't reach the executor API, consider it unhealthy + return False + + # If only instance status was checked, return that result + return instance_healthy def ensure_sandbox_healthy(instance_id: str, api_token: Optional[str] = None) -> None: From dab4673bfc10f3690db2d2c5dadc5043e533d687 Mon Sep 17 00:00:00 2001 From: Bastien Chatelard Date: Tue, 4 Nov 2025 13:30:41 +0100 Subject: [PATCH 18/47] Use sandbox executor implementation --- docs/sandbox.md | 2 + koyeb/sandbox/exec.py | 413 ++++++----------------------- koyeb/sandbox/executor_client.py | 2 + koyeb/sandbox/filesystem.py | 429 ++++++++++++------------------- koyeb/sandbox/sandbox.py | 20 +- koyeb/sandbox/utils.py | 64 +++-- 6 files changed, 303 insertions(+), 627 deletions(-) diff --git a/docs/sandbox.md b/docs/sandbox.md index d24f9e9d..39719f2e 100644 --- a/docs/sandbox.md 
+++ b/docs/sandbox.md @@ -1214,6 +1214,8 @@ Get the current status of a sandbox instance. ```python def is_sandbox_healthy(instance_id: str, + sandbox_url: str, + sandbox_secret: str, api_token: Optional[str] = None) -> bool ``` diff --git a/koyeb/sandbox/exec.py b/koyeb/sandbox/exec.py index e28a8ad9..5267998a 100644 --- a/koyeb/sandbox/exec.py +++ b/koyeb/sandbox/exec.py @@ -2,25 +2,17 @@ """ Command execution utilities for Koyeb Sandbox instances -Using WebSocket connection to Koyeb API +Using SandboxClient HTTP API """ import asyncio -import base64 -import json -import shlex import time from dataclasses import dataclass from enum import Enum from typing import Callable, Dict, List, Optional, Union -import websockets - -from koyeb.api.models.stream_result_of_exec_command_reply import ( - StreamResultOfExecCommandReply, -) - -from .utils import SandboxError, get_api_client +from .executor_client import SandboxClient +from .utils import SandboxError class CommandStatus(str, Enum): @@ -72,6 +64,18 @@ class SandboxExecutor: def __init__(self, sandbox): self.sandbox = sandbox + self._client = None + + def _get_client(self) -> SandboxClient: + """Get or create SandboxClient instance""" + if self._client is None: + sandbox_url = self.sandbox.get_sandbox_url() + if not sandbox_url: + raise SandboxError("Unable to get sandbox URL") + if not self.sandbox.sandbox_secret: + raise SandboxError("Sandbox secret not available") + self._client = SandboxClient(sandbox_url, self.sandbox.sandbox_secret) + return self._client def __call__( self, @@ -109,19 +113,39 @@ def __call__( ) ``` """ - return asyncio.run( - _exec_async( - instance_id=self.sandbox.instance_id, + start_time = time.time() + + try: + client = self._get_client() + response = client.run(cmd=command, cwd=cwd, env=env) + + stdout = response.get('stdout', '') + stderr = response.get('stderr', '') + exit_code = response.get('exit_code', 0) + + # Call callbacks if provided + if on_stdout and stdout: + on_stdout(stdout) 
+ if on_stderr and stderr: + on_stderr(stderr) + + return CommandResult( + stdout=stdout, + stderr=stderr, + exit_code=exit_code, + status=CommandStatus.FINISHED if exit_code == 0 else CommandStatus.FAILED, + duration=time.time() - start_time, + command=command, + ) + except Exception as e: + return CommandResult( + stdout="", + stderr=f"Command execution failed: {str(e)}", + exit_code=1, + status=CommandStatus.FAILED, + duration=time.time() - start_time, command=command, - cwd=cwd, - env=env, - timeout=timeout, - api_token=self.sandbox.api_token, - sandbox_secret=self.sandbox.sandbox_secret, - on_stdout=on_stdout, - on_stderr=on_stderr, ) - ) class AsyncSandboxExecutor(SandboxExecutor): @@ -168,311 +192,42 @@ async def __call__( ) ``` """ - return await _exec_async( - instance_id=self.sandbox.instance_id, - command=command, - cwd=cwd, - env=env, - timeout=timeout, - api_token=self.sandbox.api_token, - sandbox_secret=self.sandbox.sandbox_secret, - on_stdout=on_stdout, - on_stderr=on_stderr, - ) - - -def _normalize_command(command: Union[str, List[str]], *args: str) -> str: - """Normalize command to a string, handling both string and list formats""" - if isinstance(command, str): - if args: - # Join command and args with proper quoting - return ( - shlex.quote(command) + " " + " ".join(shlex.quote(arg) for arg in args) - ) - return command - else: - # List of commands - join them for shell execution - all_args = list(command) + list(args) - return " ".join(shlex.quote(arg) for arg in all_args) - - -def _build_shell_command( - command: Union[str, List[str]], - cwd: Optional[str] = None, - env: Optional[Dict[str, str]] = None, -) -> List[str]: - """Build a shell command with environment variables and working directory""" - # If command is a string, use it as-is - if isinstance(command, str): - shell_cmd = command - else: - # If it's a list, join it as a shell command - shell_cmd = " ".join(shlex.quote(arg) for arg in command) - - # Build the full command with env and 
cwd - parts = [] - - if cwd: - parts.append(f"cd {shlex.quote(cwd)}") - - if env: - env_vars = [] - for key, value in env.items(): - escaped_key = shlex.quote(key) - escaped_value = shlex.quote(value) - env_vars.append(f"{escaped_key}={escaped_value}") - if env_vars: - shell_cmd = " ".join(env_vars) + " " + shell_cmd - - if parts: - shell_cmd = " && ".join(parts) + " && " + shell_cmd - - return ["sh", "-c", shell_cmd] - - -def _decode_base64_content(content: Union[str, bytes]) -> str: - """Safely decode base64 content with proper error handling""" - if isinstance(content, str): + start_time = time.time() + + # Run in executor to avoid blocking + loop = asyncio.get_running_loop() + try: - return base64.b64decode(content).decode("utf-8") - except (base64.binascii.Error, UnicodeDecodeError): - # If base64 decoding fails, return as-is (might be plain text) - return content - else: - return content.decode("utf-8") - - -def _process_websocket_message( - message: str, -) -> tuple[Optional[str], Optional[str], Optional[int], Optional[str], bool]: - """Process WebSocket message using SDK models - - Returns: - tuple: (stdout, stderr, exit_code, error, is_finished) - """ - try: - stream_result = StreamResultOfExecCommandReply.from_dict(json.loads(message)) - except (json.JSONDecodeError, ValueError) as e: - return None, None, None, f"Failed to parse WebSocket message: {e}", False - - if stream_result.result: - result = stream_result.result - stdout = "" - stderr = "" - exit_code = None - is_finished = False - - if result.stdout and result.stdout.data: - stdout = _decode_base64_content(result.stdout.data) - - if result.stderr and result.stderr.data: - stderr = _decode_base64_content(result.stderr.data) - - if result.exit_code is not None: - exit_code = result.exit_code - # Only mark as finished if exited flag is explicitly set - # Otherwise, we might get exit_code but still have more output - if hasattr(result, "exited") and result.exited: - is_finished = True - # If exit_code 
is set but exited is not, don't mark as finished yet - # to allow for more output chunks - - return stdout, stderr, exit_code, None, is_finished - - elif stream_result.error: - error_msg = stream_result.error.message or "Unknown error" - return None, None, None, f"API Error: {error_msg}", True - - return None, None, None, None, False - - -def _get_websocket_url_and_headers( - instance_id: str, api_token: Optional[str] = None -) -> tuple[str, Dict[str, str]]: - """ - Get WebSocket URL and headers using SDK API client configuration. - - Args: - instance_id: The instance ID - api_token: API token (if None, will use get_api_client which reads from env) - - Returns: - Tuple of (websocket_url, headers_dict) - """ - _, _, instances_api = get_api_client(api_token) - api_client = instances_api.api_client - config = api_client.configuration - - host = config.host.replace("https://", "wss://").replace("http://", "ws://") - ws_url = f"{host}/v1/streams/instances/exec?id={instance_id}" - - headers = {} - auth_token = config.get_api_key_with_prefix("Bearer") - if auth_token: - headers["Authorization"] = auth_token - - return ws_url, headers - - -async def _execute_websocket_command( - instance_id: str, - command: List[str], - api_token: Optional[str] = None, - input_data: Optional[str] = None, - timeout: int = 30, - on_stdout: Optional[Callable[[str], None]] = None, - on_stderr: Optional[Callable[[str], None]] = None, -) -> CommandResult: - """Execute a command via WebSocket with proper timeout handling""" - start_time = time.time() - - ws_url, headers = _get_websocket_url_and_headers(instance_id, api_token) - - _, _, instances_api = get_api_client(api_token) - api_token_for_subprotocol = instances_api.api_client.configuration.api_key.get( - "Bearer" - ) - - try: - async with asyncio.timeout(timeout): - async with websockets.connect( - ws_url, - additional_headers=headers, - subprotocols=( - ["Bearer", api_token_for_subprotocol] - if api_token_for_subprotocol - else None - ), - 
) as websocket: - command_frame = { - "id": instance_id, - "body": {"command": command}, - } - await websocket.send(json.dumps(command_frame)) - - if input_data: - input_frame = { - "id": instance_id, - "body": { - "stdin": { - "data": base64.b64encode( - input_data.encode("utf-8") - ).decode("utf-8"), - "close": True, - } - }, - } - await websocket.send(json.dumps(input_frame)) - - stdout_data = [] - stderr_data = [] - exit_code = 0 - - async for message in websocket: - stdout, stderr, cmd_exit_code, error, is_finished = ( - _process_websocket_message(message) - ) - - if error: - stderr_data.append(error) - if on_stderr: - on_stderr(error) - if "API Error" in error: - exit_code = 1 - break - continue - - # Process stdout first (may come with exit_code in same message) - if stdout: - stdout_data.append(stdout) - if on_stdout: - on_stdout(stdout) - - # Process stderr first (may come with exit_code in same message) - if stderr: - stderr_data.append(stderr) - if on_stderr: - on_stderr(stderr) - - # Store exit code but don't break yet - there might be more output - if cmd_exit_code is not None: - exit_code = cmd_exit_code - - # Only break when explicitly finished - continue processing until all output is received - if is_finished: - break - # If we have exit code but websocket closes naturally, that's fine too - - return CommandResult( - stdout="".join(stdout_data), - stderr="".join(stderr_data), - exit_code=exit_code, - status=( - CommandStatus.FINISHED - if exit_code == 0 - else CommandStatus.FAILED - ), - duration=time.time() - start_time, - command=command[0] if command else "", - args=command[1:] if len(command) > 1 else [], - ) - - except asyncio.TimeoutError: - return CommandResult( - stdout="", - stderr=f"Command timed out after {timeout} seconds", - exit_code=1, - status=CommandStatus.FAILED, - duration=time.time() - start_time, - command=command[0] if command else "", - args=command[1:] if len(command) > 1 else [], - ) - except Exception as e: - return 
CommandResult( - stdout="", - stderr=f"Command execution failed: {str(e)}", - exit_code=1, - status=CommandStatus.FAILED, - duration=time.time() - start_time, - command=command[0] if command else "", - args=command[1:] if len(command) > 1 else [], - ) - - -async def _exec_async( - instance_id: str, - command: Union[str, List[str]], - *args: str, - cwd: Optional[str] = None, - env: Optional[Dict[str, str]] = None, - timeout: int = 30, - api_token: Optional[str] = None, - sandbox_secret: Optional[str] = None, - on_stdout: Optional[Callable[[str], None]] = None, - on_stderr: Optional[Callable[[str], None]] = None, -) -> CommandResult: - """ - Execute a command in a shell via WebSocket connection to Koyeb API. - - Internal function - use sandbox.exec() for the public API. This function handles - command normalization and delegates to the WebSocket execution handler. - - Supports streaming output via on_stdout/on_stderr callbacks. - """ - full_cmd = _normalize_command(command, *args) - - # Merge sandbox_secret into environment if provided - exec_env = env.copy() if env else {} - if sandbox_secret: - exec_env["SANDBOX_SECRET"] = sandbox_secret - - shell_command = _build_shell_command(full_cmd, cwd, exec_env) - - return await _execute_websocket_command( - instance_id=instance_id, - command=shell_command, - api_token=api_token, - timeout=timeout, - on_stdout=on_stdout, - on_stderr=on_stderr, - ) + client = self._get_client() + response = await loop.run_in_executor( + None, + lambda: client.run(cmd=command, cwd=cwd, env=env) + ) + + stdout = response.get('stdout', '') + stderr = response.get('stderr', '') + exit_code = response.get('exit_code', 0) + + # Call callbacks if provided + if on_stdout and stdout: + on_stdout(stdout) + if on_stderr and stderr: + on_stderr(stderr) + + return CommandResult( + stdout=stdout, + stderr=stderr, + exit_code=exit_code, + status=CommandStatus.FINISHED if exit_code == 0 else CommandStatus.FAILED, + duration=time.time() - start_time, + 
command=command, + ) + except Exception as e: + return CommandResult( + stdout="", + stderr=f"Command execution failed: {str(e)}", + exit_code=1, + status=CommandStatus.FAILED, + duration=time.time() - start_time, + command=command, + ) diff --git a/koyeb/sandbox/executor_client.py b/koyeb/sandbox/executor_client.py index 203f2808..351351be 100644 --- a/koyeb/sandbox/executor_client.py +++ b/koyeb/sandbox/executor_client.py @@ -35,6 +35,8 @@ def health(self) -> Dict[str, str]: """ response = requests.get(f'{self.base_url}/health') response.raise_for_status() + if response.status_code != 200: + return {'status': 'unhealthy'} return response.json() def run( diff --git a/koyeb/sandbox/filesystem.py b/koyeb/sandbox/filesystem.py index d842cb26..2fa78c8e 100644 --- a/koyeb/sandbox/filesystem.py +++ b/koyeb/sandbox/filesystem.py @@ -2,18 +2,17 @@ """ Filesystem operations for Koyeb Sandbox instances -Using only the primitives available in the Koyeb API +Using SandboxClient HTTP API """ import asyncio import base64 import os -import shlex from dataclasses import dataclass from typing import Dict, List, Union -from .exec import _exec_async -from .utils import SandboxError, ensure_sandbox_healthy +from .executor_client import SandboxClient +from .utils import SandboxError class SandboxFilesystemError(SandboxError): @@ -39,13 +38,25 @@ class FileInfo: class SandboxFilesystem: """ Synchronous filesystem operations for Koyeb Sandbox instances. - Using only the primitives available in the Koyeb API. + Using SandboxClient HTTP API. For async usage, use AsyncSandboxFilesystem instead. 
""" def __init__(self, sandbox): self.sandbox = sandbox + self._client = None + + def _get_client(self) -> SandboxClient: + """Get or create SandboxClient instance""" + if self._client is None: + sandbox_url = self.sandbox.get_sandbox_url() + if not sandbox_url: + raise SandboxError("Unable to get sandbox URL") + if not self.sandbox.sandbox_secret: + raise SandboxError("Sandbox secret not available") + self._client = SandboxClient(sandbox_url, self.sandbox.sandbox_secret) + return self._client def write_file( self, path: str, content: Union[str, bytes], encoding: str = "utf-8" @@ -58,39 +69,17 @@ def write_file( content: Content to write (string or bytes) encoding: File encoding (default: "utf-8"). Use "base64" for binary data. """ - asyncio.run(self._write_file_async(path, content, encoding)) - - async def _write_file_async( - self, path: str, content: Union[str, bytes], encoding: str = "utf-8" - ) -> None: - """Internal async implementation for write_file""" - ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) - - escaped_path = shlex.quote(path) - + client = self._get_client() + if isinstance(content, bytes): - content_str = content.decode("utf-8", errors="replace") + content_str = content.decode("utf-8") else: content_str = content - - if encoding == "base64": - content_b64 = content_str - else: - content_b64 = base64.b64encode(content_str.encode("utf-8")).decode("utf-8") - - escaped_b64 = shlex.quote(content_b64) - - result = await _exec_async( - instance_id=self.sandbox.instance_id, - command=f"printf '%s' {escaped_b64} | base64 -d > {escaped_path}", - api_token=self.sandbox.api_token, - sandbox_secret=self.sandbox.sandbox_secret, - ) - - if not result.success: - if "Permission denied" in result.stderr: - raise SandboxFilesystemError(f"Permission denied: {path}") - raise SandboxFilesystemError(f"Failed to write file: {result.stderr}") + + try: + client.write_file(path, content_str) + except Exception as e: + raise 
SandboxFilesystemError(f"Failed to write file: {str(e)}") def read_file(self, path: str, encoding: str = "utf-8") -> FileInfo: """ @@ -103,37 +92,17 @@ def read_file(self, path: str, encoding: str = "utf-8") -> FileInfo: Returns: FileInfo: Object with content and encoding """ - return asyncio.run(self._read_file_async(path, encoding)) - - async def _read_file_async(self, path: str, encoding: str = "utf-8") -> FileInfo: - """Internal async implementation for read_file""" - ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) - - escaped_path = shlex.quote(path) - - if encoding == "base64": - result = await _exec_async( - instance_id=self.sandbox.instance_id, - command=f"base64 < {escaped_path}", - api_token=self.sandbox.api_token, - sandbox_secret=self.sandbox.sandbox_secret, - ) - else: - result = await _exec_async( - instance_id=self.sandbox.instance_id, - command=f"cat {escaped_path}", - api_token=self.sandbox.api_token, - sandbox_secret=self.sandbox.sandbox_secret, - ) - - if not result.success: - if "No such file or directory" in result.stderr: + client = self._get_client() + + try: + response = client.read_file(path) + content = response.get('content', '') + return FileInfo(content=content, encoding=encoding) + except Exception as e: + error_msg = str(e) + if "not found" in error_msg.lower(): raise FileNotFoundError(f"File not found: {path}") - if "Permission denied" in result.stderr: - raise SandboxFilesystemError(f"Permission denied: {path}") - raise SandboxFilesystemError(f"Failed to read file: {result.stderr}") - - return FileInfo(content=result.stdout.strip(), encoding=encoding) + raise SandboxFilesystemError(f"Failed to read file: {error_msg}") def mkdir(self, path: str, recursive: bool = False) -> None: """ @@ -143,33 +112,15 @@ def mkdir(self, path: str, recursive: bool = False) -> None: path: Absolute path to the directory recursive: Create parent directories if needed (default: False) """ - asyncio.run(self._mkdir_async(path, 
recursive)) - - async def _mkdir_async(self, path: str, recursive: bool = False) -> None: - """Internal async implementation for mkdir""" - ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) - - if recursive: - result = await _exec_async( - instance_id=self.sandbox.instance_id, - command=["mkdir", "-p", path], - api_token=self.sandbox.api_token, - sandbox_secret=self.sandbox.sandbox_secret, - ) - else: - result = await _exec_async( - instance_id=self.sandbox.instance_id, - command=["mkdir", path], - api_token=self.sandbox.api_token, - sandbox_secret=self.sandbox.sandbox_secret, - ) - - if not result.success: - if "File exists" in result.stderr: + client = self._get_client() + + try: + client.make_dir(path) + except Exception as e: + error_msg = str(e) + if "exists" in error_msg.lower(): raise FileExistsError(f"Directory already exists: {path}") - if "Permission denied" in result.stderr: - raise SandboxFilesystemError(f"Permission denied: {path}") - raise SandboxFilesystemError(f"Failed to create directory: {result.stderr}") + raise SandboxFilesystemError(f"Failed to create directory: {error_msg}") def list_dir(self, path: str = ".") -> List[str]: """ @@ -181,27 +132,17 @@ def list_dir(self, path: str = ".") -> List[str]: Returns: List[str]: Names of files and directories within the specified path. 
""" - return asyncio.run(self._list_dir_async(path)) - - async def _list_dir_async(self, path: str = ".") -> List[str]: - """Internal async implementation for list_dir""" - ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) - - result = await _exec_async( - instance_id=self.sandbox.instance_id, - command=["ls", "-A", path], - api_token=self.sandbox.api_token, - sandbox_secret=self.sandbox.sandbox_secret, - ) - - if not result.success: - if "No such file or directory" in result.stderr: + client = self._get_client() + + try: + response = client.list_dir(path) + entries = response.get('entries', []) + return entries + except Exception as e: + error_msg = str(e) + if "not found" in error_msg.lower(): raise FileNotFoundError(f"Directory not found: {path}") - if "Permission denied" in result.stderr: - raise SandboxFilesystemError(f"Permission denied: {path}") - raise SandboxFilesystemError(f"Failed to list directory: {result.stderr}") - - return [item for item in result.stdout.splitlines() if item] + raise SandboxFilesystemError(f"Failed to list directory: {error_msg}") def delete_file(self, path: str) -> None: """ @@ -210,25 +151,15 @@ def delete_file(self, path: str) -> None: Args: path: Absolute path to the file """ - asyncio.run(self._delete_file_async(path)) - - async def _delete_file_async(self, path: str) -> None: - """Internal async implementation for delete_file""" - ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) - - result = await _exec_async( - instance_id=self.sandbox.instance_id, - command=["rm", path], - api_token=self.sandbox.api_token, - sandbox_secret=self.sandbox.sandbox_secret, - ) - - if not result.success: - if "No such file or directory" in result.stderr: + client = self._get_client() + + try: + client.delete_file(path) + except Exception as e: + error_msg = str(e) + if "not found" in error_msg.lower(): raise FileNotFoundError(f"File not found: {path}") - if "Permission denied" in result.stderr: - 
raise SandboxFilesystemError(f"Permission denied: {path}") - raise SandboxFilesystemError(f"Failed to delete file: {result.stderr}") + raise SandboxFilesystemError(f"Failed to delete file: {error_msg}") def delete_dir(self, path: str) -> None: """ @@ -237,27 +168,17 @@ def delete_dir(self, path: str) -> None: Args: path: Absolute path to the directory """ - asyncio.run(self._delete_dir_async(path)) - - async def _delete_dir_async(self, path: str) -> None: - """Internal async implementation for delete_dir""" - ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) - - result = await _exec_async( - instance_id=self.sandbox.instance_id, - command=["rmdir", path], - api_token=self.sandbox.api_token, - sandbox_secret=self.sandbox.sandbox_secret, - ) - - if not result.success: - if "No such file or directory" in result.stderr: + client = self._get_client() + + try: + client.delete_dir(path) + except Exception as e: + error_msg = str(e) + if "not found" in error_msg.lower(): raise FileNotFoundError(f"Directory not found: {path}") - if "Directory not empty" in result.stderr: + if "not empty" in error_msg.lower(): raise SandboxFilesystemError(f"Directory not empty: {path}") - if "Permission denied" in result.stderr: - raise SandboxFilesystemError(f"Permission denied: {path}") - raise SandboxFilesystemError(f"Failed to delete directory: {result.stderr}") + raise SandboxFilesystemError(f"Failed to delete directory: {error_msg}") def rename_file(self, old_path: str, new_path: str) -> None: """ @@ -267,24 +188,14 @@ def rename_file(self, old_path: str, new_path: str) -> None: old_path: Current file path new_path: New file path """ - asyncio.run(self._rename_file_async(old_path, new_path)) - - async def _rename_file_async(self, old_path: str, new_path: str) -> None: - """Internal async implementation for rename_file""" - ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) - - result = await _exec_async( - 
instance_id=self.sandbox.instance_id, - command=["mv", old_path, new_path], - api_token=self.sandbox.api_token, - sandbox_secret=self.sandbox.sandbox_secret, - ) - + # Use exec since there's no direct rename in SandboxClient + from .exec import SandboxExecutor + executor = SandboxExecutor(self.sandbox) + result = executor(f"mv {old_path} {new_path}") + if not result.success: - if "No such file or directory" in result.stderr: + if "No such file" in result.stderr: raise FileNotFoundError(f"File not found: {old_path}") - if "Permission denied" in result.stderr: - raise SandboxFilesystemError(f"Permission denied: {old_path}") raise SandboxFilesystemError(f"Failed to rename file: {result.stderr}") def move_file(self, source_path: str, destination_path: str) -> None: @@ -295,24 +206,14 @@ def move_file(self, source_path: str, destination_path: str) -> None: source_path: Current file path destination_path: Destination path """ - asyncio.run(self._move_file_async(source_path, destination_path)) - - async def _move_file_async(self, source_path: str, destination_path: str) -> None: - """Internal async implementation for move_file""" - ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) - - result = await _exec_async( - instance_id=self.sandbox.instance_id, - command=["mv", source_path, destination_path], - api_token=self.sandbox.api_token, - sandbox_secret=self.sandbox.sandbox_secret, - ) - + # Use exec since there's no direct move in SandboxClient + from .exec import SandboxExecutor + executor = SandboxExecutor(self.sandbox) + result = executor(f"mv {source_path} {destination_path}") + if not result.success: - if "No such file or directory" in result.stderr: + if "No such file" in result.stderr: raise FileNotFoundError(f"File not found: {source_path}") - if "Permission denied" in result.stderr: - raise SandboxFilesystemError(f"Permission denied: {source_path}") raise SandboxFilesystemError(f"Failed to move file: {result.stderr}") def write_files(self, 
files: List[Dict[str, str]]) -> None: @@ -330,47 +231,23 @@ def write_files(self, files: List[Dict[str, str]]) -> None: def exists(self, path: str) -> bool: """Check if file/directory exists synchronously""" - return asyncio.run(self._exists_async(path)) - - async def _exists_async(self, path: str) -> bool: - """Internal async implementation for exists""" - ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) - result = await _exec_async( - instance_id=self.sandbox.instance_id, - command=["test", "-e", path], - api_token=self.sandbox.api_token, - sandbox_secret=self.sandbox.sandbox_secret, - ) + from .exec import SandboxExecutor + executor = SandboxExecutor(self.sandbox) + result = executor(f"test -e {path}") return result.success def is_file(self, path: str) -> bool: """Check if path is a file synchronously""" - return asyncio.run(self._is_file_async(path)) - - async def _is_file_async(self, path: str) -> bool: - """Internal async implementation for is_file""" - ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) - result = await _exec_async( - instance_id=self.sandbox.instance_id, - command=["test", "-f", path], - api_token=self.sandbox.api_token, - sandbox_secret=self.sandbox.sandbox_secret, - ) + from .exec import SandboxExecutor + executor = SandboxExecutor(self.sandbox) + result = executor(f"test -f {path}") return result.success def is_dir(self, path: str) -> bool: """Check if path is a directory synchronously""" - return asyncio.run(self._is_dir_async(path)) - - async def _is_dir_async(self, path: str) -> bool: - """Internal async implementation for is_dir""" - ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) - result = await _exec_async( - instance_id=self.sandbox.instance_id, - command=["test", "-d", path], - api_token=self.sandbox.api_token, - sandbox_secret=self.sandbox.sandbox_secret, - ) + from .exec import SandboxExecutor + executor = SandboxExecutor(self.sandbox) + result = 
executor(f"test -d {path}") return result.success def upload_file(self, local_path: str, remote_path: str) -> None: @@ -381,19 +258,13 @@ def upload_file(self, local_path: str, remote_path: str) -> None: local_path: Path to the local file remote_path: Destination path in the sandbox """ - asyncio.run(self._upload_file_async(local_path, remote_path)) - - async def _upload_file_async(self, local_path: str, remote_path: str) -> None: - """Internal async implementation for upload_file""" - ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) - if not os.path.exists(local_path): raise FileNotFoundError(f"Local file not found: {local_path}") with open(local_path, "rb") as f: - content = base64.b64encode(f.read()).decode("utf-8") - - await self._write_file_async(remote_path, content, encoding="base64") + content = f.read().decode("utf-8") + + self.write_file(remote_path, content) def download_file(self, remote_path: str, local_path: str) -> None: """ @@ -403,15 +274,9 @@ def download_file(self, remote_path: str, local_path: str) -> None: remote_path: Path to the file in the sandbox local_path: Destination path on the local filesystem """ - asyncio.run(self._download_file_async(remote_path, local_path)) - - async def _download_file_async(self, remote_path: str, local_path: str) -> None: - """Internal async implementation for download_file""" - ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) - - file_info = await self._read_file_async(remote_path, encoding="base64") - content = base64.b64decode(file_info.content) - + file_info = self.read_file(remote_path) + content = file_info.content.encode("utf-8") + with open(local_path, "wb") as f: f.write(content) @@ -435,24 +300,13 @@ def rm(self, path: str, recursive: bool = False) -> None: path: Path to remove recursive: Remove recursively """ - asyncio.run(self._rm_async(path, recursive)) - - async def _rm_async(self, path: str, recursive: bool = False) -> None: - """Internal async 
implementation for rm""" - ensure_sandbox_healthy(self.sandbox.instance_id, self.sandbox.api_token) - + from .exec import SandboxExecutor + executor = SandboxExecutor(self.sandbox) + if recursive: - result = await _exec_async( - instance_id=self.sandbox.instance_id, - command=["rm", "-rf", path], - api_token=self.sandbox.api_token, - ) + result = executor(f"rm -rf {path}") else: - result = await _exec_async( - instance_id=self.sandbox.instance_id, - command=["rm", path], - api_token=self.sandbox.api_token, - ) + result = executor(f"rm {path}") if not result.success: if "No such file or directory" in result.stderr: @@ -490,7 +344,8 @@ async def write_file( content: Content to write (string or bytes) encoding: File encoding (default: "utf-8"). Use "base64" for binary data. """ - await self._write_file_async(path, content, encoding) + loop = asyncio.get_running_loop() + await loop.run_in_executor(None, self.write_file, path, content, encoding) async def read_file(self, path: str, encoding: str = "utf-8") -> FileInfo: """ @@ -503,7 +358,11 @@ async def read_file(self, path: str, encoding: str = "utf-8") -> FileInfo: Returns: FileInfo: Object with content and encoding """ - return await self._read_file_async(path, encoding) + loop = asyncio.get_running_loop() + return await loop.run_in_executor( + None, + lambda: super(AsyncSandboxFilesystem, self).read_file(path, encoding) + ) async def mkdir(self, path: str, recursive: bool = False) -> None: """ @@ -513,7 +372,11 @@ async def mkdir(self, path: str, recursive: bool = False) -> None: path: Absolute path to the directory recursive: Create parent directories if needed (default: False) """ - await self._mkdir_async(path, recursive) + loop = asyncio.get_running_loop() + await loop.run_in_executor( + None, + lambda: super(AsyncSandboxFilesystem, self).mkdir(path, recursive) + ) async def list_dir(self, path: str = ".") -> List[str]: """ @@ -525,7 +388,11 @@ async def list_dir(self, path: str = ".") -> List[str]: Returns: 
List[str]: Names of files and directories within the specified path. """ - return await self._list_dir_async(path) + loop = asyncio.get_running_loop() + return await loop.run_in_executor( + None, + lambda: super(AsyncSandboxFilesystem, self).list_dir(path) + ) async def delete_file(self, path: str) -> None: """ @@ -534,7 +401,11 @@ async def delete_file(self, path: str) -> None: Args: path: Absolute path to the file """ - await self._delete_file_async(path) + loop = asyncio.get_running_loop() + await loop.run_in_executor( + None, + lambda: super(AsyncSandboxFilesystem, self).delete_file(path) + ) async def delete_dir(self, path: str) -> None: """ @@ -543,7 +414,11 @@ async def delete_dir(self, path: str) -> None: Args: path: Absolute path to the directory """ - await self._delete_dir_async(path) + loop = asyncio.get_running_loop() + await loop.run_in_executor( + None, + lambda: super(AsyncSandboxFilesystem, self).delete_dir(path) + ) async def rename_file(self, old_path: str, new_path: str) -> None: """ @@ -553,7 +428,11 @@ async def rename_file(self, old_path: str, new_path: str) -> None: old_path: Current file path new_path: New file path """ - await self._rename_file_async(old_path, new_path) + loop = asyncio.get_running_loop() + await loop.run_in_executor( + None, + lambda: super(AsyncSandboxFilesystem, self).rename_file(old_path, new_path) + ) async def move_file(self, source_path: str, destination_path: str) -> None: """ @@ -563,7 +442,11 @@ async def move_file(self, source_path: str, destination_path: str) -> None: source_path: Current file path destination_path: Destination path """ - await self._move_file_async(source_path, destination_path) + loop = asyncio.get_running_loop() + await loop.run_in_executor( + None, + lambda: super(AsyncSandboxFilesystem, self).move_file(source_path, destination_path) + ) async def write_files(self, files: List[Dict[str, str]]) -> None: """ @@ -580,15 +463,27 @@ async def write_files(self, files: List[Dict[str, str]]) -> 
None: async def exists(self, path: str) -> bool: """Check if file/directory exists asynchronously""" - return await self._exists_async(path) + loop = asyncio.get_running_loop() + return await loop.run_in_executor( + None, + lambda: super(AsyncSandboxFilesystem, self).exists(path) + ) async def is_file(self, path: str) -> bool: """Check if path is a file asynchronously""" - return await self._is_file_async(path) + loop = asyncio.get_running_loop() + return await loop.run_in_executor( + None, + lambda: super(AsyncSandboxFilesystem, self).is_file(path) + ) async def is_dir(self, path: str) -> bool: """Check if path is a directory asynchronously""" - return await self._is_dir_async(path) + loop = asyncio.get_running_loop() + return await loop.run_in_executor( + None, + lambda: super(AsyncSandboxFilesystem, self).is_dir(path) + ) async def upload_file(self, local_path: str, remote_path: str) -> None: """ @@ -598,7 +493,11 @@ async def upload_file(self, local_path: str, remote_path: str) -> None: local_path: Path to the local file remote_path: Destination path in the sandbox """ - await self._upload_file_async(local_path, remote_path) + loop = asyncio.get_running_loop() + await loop.run_in_executor( + None, + lambda: super(AsyncSandboxFilesystem, self).upload_file(local_path, remote_path) + ) async def download_file(self, remote_path: str, local_path: str) -> None: """ @@ -608,7 +507,11 @@ async def download_file(self, remote_path: str, local_path: str) -> None: remote_path: Path to the file in the sandbox local_path: Destination path on the local filesystem """ - await self._download_file_async(remote_path, local_path) + loop = asyncio.get_running_loop() + await loop.run_in_executor( + None, + lambda: super(AsyncSandboxFilesystem, self).download_file(remote_path, local_path) + ) async def ls(self, path: str = ".") -> List[str]: """ @@ -630,7 +533,11 @@ async def rm(self, path: str, recursive: bool = False) -> None: path: Path to remove recursive: Remove recursively """ 
- await self._rm_async(path, recursive) + loop = asyncio.get_running_loop() + await loop.run_in_executor( + None, + lambda: super(AsyncSandboxFilesystem, self).rm(path, recursive) + ) def open(self, path: str, mode: str = "r") -> "AsyncSandboxFileIO": """ diff --git a/koyeb/sandbox/sandbox.py b/koyeb/sandbox/sandbox.py index 9dafaa96..985fabf8 100644 --- a/koyeb/sandbox/sandbox.py +++ b/koyeb/sandbox/sandbox.py @@ -210,9 +210,23 @@ def wait_ready(self, timeout: int = 60, poll_interval: float = 2.0) -> bool: bool: True if sandbox became ready, False if timeout """ start_time = time.time() + sandbox_url = None while time.time() - start_time < timeout: - is_healthy = is_sandbox_healthy(self.instance_id, self.api_token) + # Get sandbox URL on first iteration or if not yet retrieved + if sandbox_url is None: + sandbox_url = self.get_sandbox_url() + # If URL is not available yet, wait and retry + if sandbox_url is None: + time.sleep(poll_interval) + continue + + is_healthy = is_sandbox_healthy( + self.instance_id, + sandbox_url=sandbox_url, + sandbox_secret=self.sandbox_secret, + api_token=self.api_token + ) if is_healthy: return True @@ -252,9 +266,9 @@ def is_healthy(self) -> bool: sandbox_url = self.get_sandbox_url() return is_sandbox_healthy( self.instance_id, - self.api_token, sandbox_url=sandbox_url, - sandbox_secret=self.sandbox_secret + sandbox_secret=self.sandbox_secret, + api_token=self.api_token ) @property diff --git a/koyeb/sandbox/utils.py b/koyeb/sandbox/utils.py index e3c84871..f15106e2 100644 --- a/koyeb/sandbox/utils.py +++ b/koyeb/sandbox/utils.py @@ -205,22 +205,14 @@ def get_sandbox_url(service_id: str, api_token: Optional[str] = None) -> Optiona # Get the service app URL (this would be like: app-name-org.koyeb.app) # The URL is typically constructed from the app name and organization service = service_response.service - if hasattr(service, 'app_url') and service.app_url: - return f"https://{service.app_url}/koyeb-sandbox" - # If app_url is not 
available, we need to get it from the app if service.app_id: apps_api, _, _ = get_api_client(api_token) app_response = apps_api.get_app(service.app_id) app = app_response.app if hasattr(app, 'domains') and app.domains: # Use the first public domain - return f"https://{app.domains[0]}/koyeb-sandbox" - # Fallback: construct from app name - if app.name and service.organization_id: - # This is an approximation - actual URL construction may vary - return f"https://{app.name}.koyeb.app/koyeb-sandbox" - + return f"https://{app.domains[0].name}/koyeb-sandbox" return None except (NotFoundException, ApiException, Exception): return None @@ -228,26 +220,34 @@ def get_sandbox_url(service_id: str, api_token: Optional[str] = None) -> Optiona def is_sandbox_healthy( instance_id: str, - api_token: Optional[str] = None, - sandbox_url: Optional[str] = None, - sandbox_secret: Optional[str] = None + sandbox_url: str, + sandbox_secret: str, + api_token: Optional[str] = None ) -> bool: """ Check if sandbox is healthy and ready for operations. - This function checks both: - 1. The Koyeb instance status (via API) - 2. The sandbox executor health endpoint (via SandboxClient, if URL and secret are provided) + This function requires both sandbox_url and sandbox_secret to properly check: + 1. The Koyeb instance status (via API) - using instance_id and api_token + 2. 
The sandbox executor health endpoint (via SandboxClient) - using sandbox_url and sandbox_secret Args: instance_id: The Koyeb instance ID api_token: Koyeb API token - sandbox_url: Optional URL of the sandbox executor API - sandbox_secret: Optional secret for sandbox executor authentication + sandbox_url: URL of the sandbox executor API (required) + sandbox_secret: Secret for sandbox executor authentication (required) Returns: bool: True if sandbox is healthy, False otherwise + + Raises: + ValueError: If sandbox_url or sandbox_secret are not provided """ + if not sandbox_url: + raise ValueError("sandbox_url is required for health check") + if not sandbox_secret: + raise ValueError("sandbox_secret is required for health check") + # Check Koyeb instance status instance_healthy = get_sandbox_status(instance_id, api_token) == InstanceStatus.HEALTHY @@ -255,23 +255,19 @@ def is_sandbox_healthy( if not instance_healthy: return False - # If sandbox URL and secret are provided, also check executor health - if sandbox_url and sandbox_secret: - try: - client = SandboxClient(sandbox_url, sandbox_secret) - health_response = client.health() - # Check if health response indicates the server is healthy - # The exact response format may vary, but typically has a "status" field - if isinstance(health_response, dict): - status = health_response.get('status', '').lower() - return status in ['ok', 'healthy', 'ready'] - return True # If we got a response, consider it healthy - except Exception: - # If we can't reach the executor API, consider it unhealthy - return False - - # If only instance status was checked, return that result - return instance_healthy + # Check executor health + try: + client = SandboxClient(sandbox_url, sandbox_secret) + health_response = client.health() + # Check if health response indicates the server is healthy + # The exact response format may vary, but typically has a "status" field + if isinstance(health_response, dict): + status = 
health_response.get('status', '').lower() + return status in ['ok', 'healthy', 'ready'] + return True # If we got a response, consider it healthy + except Exception: + # If we can't reach the executor API, consider it unhealthy + return False def ensure_sandbox_healthy(instance_id: str, api_token: Optional[str] = None) -> None: From fa949105020ef4c1b5a6f4682ce8989b4e304f6e Mon Sep 17 00:00:00 2001 From: Bastien Chatelard Date: Tue, 4 Nov 2025 13:34:06 +0100 Subject: [PATCH 19/47] Implement automatic retry on 503 --- koyeb/sandbox/executor_client.py | 81 ++++++++++++++++++++++++++------ 1 file changed, 67 insertions(+), 14 deletions(-) diff --git a/koyeb/sandbox/executor_client.py b/koyeb/sandbox/executor_client.py index 351351be..1977503f 100644 --- a/koyeb/sandbox/executor_client.py +++ b/koyeb/sandbox/executor_client.py @@ -5,6 +5,7 @@ """ import requests +import time from typing import Optional, Dict, List, Any @@ -26,6 +27,58 @@ def __init__(self, base_url: str, secret: str): 'Content-Type': 'application/json' } +def _request_with_retry( + self, + method: str, + url: str, + max_retries: int = 3, + initial_backoff: float = 1.0, + **kwargs +) -> requests.Response: + """ + Make an HTTP request with retry logic for 503 errors. 
+ + Args: + method: HTTP method (e.g., 'GET', 'POST') + url: The URL to request + max_retries: Maximum number of retry attempts + initial_backoff: Initial backoff time in seconds (doubles each retry) + **kwargs: Additional arguments to pass to requests + + Returns: + Response object + + Raises: + requests.HTTPError: If the request fails after all retries + """ + backoff = initial_backoff + last_exception = None + + for attempt in range(max_retries + 1): + try: + response = requests.request(method, url, **kwargs) + + # If we get a 503, retry with backoff + if response.status_code == 503 and attempt < max_retries: + time.sleep(backoff) + backoff *= 2 # Exponential backoff + continue + + response.raise_for_status() + return response + + except requests.HTTPError as e: + if e.response.status_code == 503 and attempt < max_retries: + time.sleep(backoff) + backoff *= 2 + last_exception = e + continue + raise + + # If we exhausted all retries, raise the last exception + if last_exception: + raise last_exception + def health(self) -> Dict[str, str]: """ Check the health status of the server. 
@@ -62,12 +115,12 @@ def run( if env is not None: payload['env'] = env - response = requests.post( + response = self._request_with_retry( + 'POST', f'{self.base_url}/run', json=payload, headers=self.headers ) - response.raise_for_status() return response.json() def write_file(self, path: str, content: str) -> Dict[str, Any]: @@ -85,12 +138,12 @@ def write_file(self, path: str, content: str) -> Dict[str, Any]: 'path': path, 'content': content } - response = requests.post( + response = self._request_with_retry( + 'POST', f'{self.base_url}/write_file', json=payload, headers=self.headers ) - response.raise_for_status() return response.json() def read_file(self, path: str) -> Dict[str, Any]: @@ -104,12 +157,12 @@ def read_file(self, path: str) -> Dict[str, Any]: Dict with file content and error if any """ payload = {'path': path} - response = requests.post( + response = self._request_with_retry( + 'POST', f'{self.base_url}/read_file', json=payload, headers=self.headers ) - response.raise_for_status() return response.json() def delete_file(self, path: str) -> Dict[str, Any]: @@ -123,12 +176,12 @@ def delete_file(self, path: str) -> Dict[str, Any]: Dict with success status and error if any """ payload = {'path': path} - response = requests.post( + response = self._request_with_retry( + 'POST', f'{self.base_url}/delete_file', json=payload, headers=self.headers ) - response.raise_for_status() return response.json() def make_dir(self, path: str) -> Dict[str, Any]: @@ -142,12 +195,12 @@ def make_dir(self, path: str) -> Dict[str, Any]: Dict with success status and error if any """ payload = {'path': path} - response = requests.post( + response = self._request_with_retry( + 'POST', f'{self.base_url}/make_dir', json=payload, headers=self.headers ) - response.raise_for_status() return response.json() def delete_dir(self, path: str) -> Dict[str, Any]: @@ -161,12 +214,12 @@ def delete_dir(self, path: str) -> Dict[str, Any]: Dict with success status and error if any """ payload = 
{'path': path} - response = requests.post( + response = self._request_with_retry( + 'POST', f'{self.base_url}/delete_dir', json=payload, headers=self.headers ) - response.raise_for_status() return response.json() def list_dir(self, path: str) -> Dict[str, Any]: @@ -180,11 +233,11 @@ def list_dir(self, path: str) -> Dict[str, Any]: Dict with entries list and error if any """ payload = {'path': path} - response = requests.post( + response = self._request_with_retry( + 'POST', f'{self.base_url}/list_dir', json=payload, headers=self.headers ) - response.raise_for_status() return response.json() From 6e3fe2641a65178a173ecac85b555bf6a2b2c882 Mon Sep 17 00:00:00 2001 From: Bastien Chatelard Date: Tue, 4 Nov 2025 15:44:10 +0100 Subject: [PATCH 20/47] Fix indent on retry --- koyeb/sandbox/executor_client.py | 172 ++++++++++++++++++++++--------- 1 file changed, 122 insertions(+), 50 deletions(-) diff --git a/koyeb/sandbox/executor_client.py b/koyeb/sandbox/executor_client.py index 1977503f..8668b53c 100644 --- a/koyeb/sandbox/executor_client.py +++ b/koyeb/sandbox/executor_client.py @@ -6,7 +6,7 @@ import requests import time -from typing import Optional, Dict, List, Any +from typing import Optional, Dict, List, Any, Iterator class SandboxClient: @@ -27,57 +27,57 @@ def __init__(self, base_url: str, secret: str): 'Content-Type': 'application/json' } -def _request_with_retry( - self, - method: str, - url: str, - max_retries: int = 3, - initial_backoff: float = 1.0, - **kwargs -) -> requests.Response: - """ - Make an HTTP request with retry logic for 503 errors. 
- - Args: - method: HTTP method (e.g., 'GET', 'POST') - url: The URL to request - max_retries: Maximum number of retry attempts - initial_backoff: Initial backoff time in seconds (doubles each retry) - **kwargs: Additional arguments to pass to requests - - Returns: - Response object - - Raises: - requests.HTTPError: If the request fails after all retries - """ - backoff = initial_backoff - last_exception = None - - for attempt in range(max_retries + 1): - try: - response = requests.request(method, url, **kwargs) - - # If we get a 503, retry with backoff - if response.status_code == 503 and attempt < max_retries: - time.sleep(backoff) - backoff *= 2 # Exponential backoff - continue + def _request_with_retry( + self, + method: str, + url: str, + max_retries: int = 3, + initial_backoff: float = 1.0, + **kwargs + ) -> requests.Response: + """ + Make an HTTP request with retry logic for 503 errors. + + Args: + method: HTTP method (e.g., 'GET', 'POST') + url: The URL to request + max_retries: Maximum number of retry attempts + initial_backoff: Initial backoff time in seconds (doubles each retry) + **kwargs: Additional arguments to pass to requests - response.raise_for_status() - return response + Returns: + Response object - except requests.HTTPError as e: - if e.response.status_code == 503 and attempt < max_retries: - time.sleep(backoff) - backoff *= 2 - last_exception = e - continue - raise - - # If we exhausted all retries, raise the last exception - if last_exception: - raise last_exception + Raises: + requests.HTTPError: If the request fails after all retries + """ + backoff = initial_backoff + last_exception = None + + for attempt in range(max_retries + 1): + try: + response = requests.request(method, url, **kwargs) + + # If we get a 503, retry with backoff + if response.status_code == 503 and attempt < max_retries: + time.sleep(backoff) + backoff *= 2 # Exponential backoff + continue + + response.raise_for_status() + return response + + except requests.HTTPError 
as e: + if e.response.status_code == 503 and attempt < max_retries: + time.sleep(backoff) + backoff *= 2 + last_exception = e + continue + raise + + # If we exhausted all retries, raise the last exception + if last_exception: + raise last_exception def health(self) -> Dict[str, str]: """ @@ -123,6 +123,78 @@ def run( ) return response.json() + def run_streaming( + self, + cmd: str, + cwd: Optional[str] = None, + env: Optional[Dict[str, str]] = None + ) -> Iterator[Dict[str, Any]]: + """ + Execute a shell command in the sandbox and stream the output in real-time. + + This method uses Server-Sent Events (SSE) to stream command output line-by-line + as it's produced. Use this for long-running commands where you want real-time + output. For simple commands where buffered output is acceptable, use run() instead. + + Args: + cmd: The shell command to execute + cwd: Optional working directory for command execution + env: Optional environment variables to set/override + + Yields: + Dict events with the following types: + + - output events (as command produces output): + {"stream": "stdout"|"stderr", "data": "line of output"} + + - complete event (when command finishes): + {"code": , "error": false} + + - error event (if command fails to start): + {"error": "error message"} + + Example: + >>> client = SandboxClient("http://localhost:8080", "secret") + >>> for event in client.run_streaming("echo 'Hello'; sleep 1; echo 'World'"): + ... if "stream" in event: + ... print(f"{event['stream']}: {event['data']}") + ... elif "code" in event: + ... 
print(f"Exit code: {event['code']}") + """ + import json + + payload = {'cmd': cmd} + if cwd is not None: + payload['cwd'] = cwd + if env is not None: + payload['env'] = env + + response = requests.post( + f'{self.base_url}/run_streaming', + json=payload, + headers=self.headers, + stream=True + ) + response.raise_for_status() + + # Parse Server-Sent Events stream + event_type = None + for line in response.iter_lines(decode_unicode=True): + if not line: + continue + + if line.startswith('event:'): + event_type = line[6:].strip() + elif line.startswith('data:'): + data = line[5:].strip() + try: + event_data = json.loads(data) + yield event_data + except json.JSONDecodeError: + # If we can't parse the JSON, yield the raw data + yield {"error": f"Failed to parse event data: {data}"} + event_type = None + def write_file(self, path: str, content: str) -> Dict[str, Any]: """ Write content to a file. From 0ae0666b62f3ccaec9071faddb579f5aa9a4be3a Mon Sep 17 00:00:00 2001 From: Bastien Chatelard Date: Tue, 4 Nov 2025 15:44:27 +0100 Subject: [PATCH 21/47] Use streaming when requested --- koyeb/sandbox/exec.py | 128 ++++++++++++++++++++++++++++++++++++++---- 1 file changed, 116 insertions(+), 12 deletions(-) diff --git a/koyeb/sandbox/exec.py b/koyeb/sandbox/exec.py index 5267998a..4ea7fe28 100644 --- a/koyeb/sandbox/exec.py +++ b/koyeb/sandbox/exec.py @@ -115,6 +115,59 @@ def __call__( """ start_time = time.time() + # Use streaming if callbacks are provided + if on_stdout or on_stderr: + stdout_buffer = [] + stderr_buffer = [] + exit_code = 0 + + try: + client = self._get_client() + for event in client.run_streaming(cmd=command, cwd=cwd, env=env): + if "stream" in event: + stream_type = event["stream"] + data = event["data"] + + if stream_type == "stdout": + stdout_buffer.append(data) + if on_stdout: + on_stdout(data) + elif stream_type == "stderr": + stderr_buffer.append(data) + if on_stderr: + on_stderr(data) + elif "code" in event: + exit_code = event["code"] + elif 
"error" in event and isinstance(event["error"], str): + # Error starting command + return CommandResult( + stdout="", + stderr=event["error"], + exit_code=1, + status=CommandStatus.FAILED, + duration=time.time() - start_time, + command=command, + ) + + return CommandResult( + stdout="".join(stdout_buffer), + stderr="".join(stderr_buffer), + exit_code=exit_code, + status=CommandStatus.FINISHED if exit_code == 0 else CommandStatus.FAILED, + duration=time.time() - start_time, + command=command, + ) + except Exception as e: + return CommandResult( + stdout="", + stderr=f"Command execution failed: {str(e)}", + exit_code=1, + status=CommandStatus.FAILED, + duration=time.time() - start_time, + command=command, + ) + + # Use regular run for non-streaming execution try: client = self._get_client() response = client.run(cmd=command, cwd=cwd, env=env) @@ -123,12 +176,6 @@ def __call__( stderr = response.get('stderr', '') exit_code = response.get('exit_code', 0) - # Call callbacks if provided - if on_stdout and stdout: - on_stdout(stdout) - if on_stderr and stderr: - on_stderr(stderr) - return CommandResult( stdout=stdout, stderr=stderr, @@ -194,6 +241,69 @@ async def __call__( """ start_time = time.time() + # Use streaming if callbacks are provided + if on_stdout or on_stderr: + stdout_buffer = [] + stderr_buffer = [] + exit_code = 0 + + try: + client = self._get_client() + # Run streaming in executor to avoid blocking + loop = asyncio.get_running_loop() + + def stream_command(): + events = [] + for event in client.run_streaming(cmd=command, cwd=cwd, env=env): + events.append(event) + return events + + events = await loop.run_in_executor(None, stream_command) + + for event in events: + if "stream" in event: + stream_type = event["stream"] + data = event["data"] + + if stream_type == "stdout": + stdout_buffer.append(data) + if on_stdout: + on_stdout(data) + elif stream_type == "stderr": + stderr_buffer.append(data) + if on_stderr: + on_stderr(data) + elif "code" in event: + 
exit_code = event["code"] + elif "error" in event and isinstance(event["error"], str): + # Error starting command + return CommandResult( + stdout="", + stderr=event["error"], + exit_code=1, + status=CommandStatus.FAILED, + duration=time.time() - start_time, + command=command, + ) + + return CommandResult( + stdout="".join(stdout_buffer), + stderr="".join(stderr_buffer), + exit_code=exit_code, + status=CommandStatus.FINISHED if exit_code == 0 else CommandStatus.FAILED, + duration=time.time() - start_time, + command=command, + ) + except Exception as e: + return CommandResult( + stdout="", + stderr=f"Command execution failed: {str(e)}", + exit_code=1, + status=CommandStatus.FAILED, + duration=time.time() - start_time, + command=command, + ) + # Run in executor to avoid blocking loop = asyncio.get_running_loop() @@ -208,12 +318,6 @@ async def __call__( stderr = response.get('stderr', '') exit_code = response.get('exit_code', 0) - # Call callbacks if provided - if on_stdout and stdout: - on_stdout(stdout) - if on_stderr and stderr: - on_stderr(stderr) - return CommandResult( stdout=stdout, stderr=stderr, From 4b8b3f3558be83c73c0f8a787d67280c4647a742 Mon Sep 17 00:00:00 2001 From: Edouard Bonlieu Date: Tue, 4 Nov 2025 16:04:07 +0100 Subject: [PATCH 22/47] cleanup / refacto --- koyeb/sandbox/exec.py | 81 +++++++------ koyeb/sandbox/executor_client.py | 194 +++++++++++++------------------ koyeb/sandbox/filesystem.py | 118 ++++++++++--------- koyeb/sandbox/sandbox.py | 32 +++-- 4 files changed, 200 insertions(+), 225 deletions(-) diff --git a/koyeb/sandbox/exec.py b/koyeb/sandbox/exec.py index 4ea7fe28..ea940435 100644 --- a/koyeb/sandbox/exec.py +++ b/koyeb/sandbox/exec.py @@ -9,7 +9,7 @@ import time from dataclasses import dataclass from enum import Enum -from typing import Callable, Dict, List, Optional, Union +from typing import Callable, Dict, List, Optional from .executor_client import SandboxClient from .utils import SandboxError @@ -58,7 +58,7 @@ class 
SandboxExecutor: """ Synchronous command execution interface for Koyeb Sandbox instances. Bound to a specific sandbox instance. - + For async usage, use AsyncSandboxExecutor instead. """ @@ -93,7 +93,7 @@ def __call__( command: Command to execute as a string (e.g., "python -c 'print(2+2)'") cwd: Working directory for the command env: Environment variables for the command - timeout: Command timeout in seconds + timeout: Command timeout in seconds (not currently enforced, reserved for future use) on_stdout: Optional callback for streaming stdout chunks on_stderr: Optional callback for streaming stderr chunks @@ -114,20 +114,20 @@ def __call__( ``` """ start_time = time.time() - + # Use streaming if callbacks are provided if on_stdout or on_stderr: stdout_buffer = [] stderr_buffer = [] exit_code = 0 - + try: client = self._get_client() for event in client.run_streaming(cmd=command, cwd=cwd, env=env): if "stream" in event: stream_type = event["stream"] data = event["data"] - + if stream_type == "stdout": stdout_buffer.append(data) if on_stdout: @@ -148,12 +148,16 @@ def __call__( duration=time.time() - start_time, command=command, ) - + return CommandResult( stdout="".join(stdout_buffer), stderr="".join(stderr_buffer), exit_code=exit_code, - status=CommandStatus.FINISHED if exit_code == 0 else CommandStatus.FAILED, + status=( + CommandStatus.FINISHED + if exit_code == 0 + else CommandStatus.FAILED + ), duration=time.time() - start_time, command=command, ) @@ -166,21 +170,23 @@ def __call__( duration=time.time() - start_time, command=command, ) - + # Use regular run for non-streaming execution try: client = self._get_client() response = client.run(cmd=command, cwd=cwd, env=env) - - stdout = response.get('stdout', '') - stderr = response.get('stderr', '') - exit_code = response.get('exit_code', 0) - + + stdout = response.get("stdout", "") + stderr = response.get("stderr", "") + exit_code = response.get("exit_code", 0) + return CommandResult( stdout=stdout, stderr=stderr, 
exit_code=exit_code, - status=CommandStatus.FINISHED if exit_code == 0 else CommandStatus.FAILED, + status=( + CommandStatus.FINISHED if exit_code == 0 else CommandStatus.FAILED + ), duration=time.time() - start_time, command=command, ) @@ -199,7 +205,7 @@ class AsyncSandboxExecutor(SandboxExecutor): """ Async command execution interface for Koyeb Sandbox instances. Bound to a specific sandbox instance. - + Inherits from SandboxExecutor and provides async command execution. """ @@ -219,7 +225,7 @@ async def __call__( command: Command to execute as a string (e.g., "python -c 'print(2+2)'") cwd: Working directory for the command env: Environment variables for the command - timeout: Command timeout in seconds + timeout: Command timeout in seconds (not currently enforced, reserved for future use) on_stdout: Optional callback for streaming stdout chunks on_stderr: Optional callback for streaming stderr chunks @@ -240,31 +246,31 @@ async def __call__( ``` """ start_time = time.time() - + # Use streaming if callbacks are provided if on_stdout or on_stderr: stdout_buffer = [] stderr_buffer = [] exit_code = 0 - + try: client = self._get_client() # Run streaming in executor to avoid blocking loop = asyncio.get_running_loop() - + def stream_command(): events = [] for event in client.run_streaming(cmd=command, cwd=cwd, env=env): events.append(event) return events - + events = await loop.run_in_executor(None, stream_command) - + for event in events: if "stream" in event: stream_type = event["stream"] data = event["data"] - + if stream_type == "stdout": stdout_buffer.append(data) if on_stdout: @@ -285,12 +291,16 @@ def stream_command(): duration=time.time() - start_time, command=command, ) - + return CommandResult( stdout="".join(stdout_buffer), stderr="".join(stderr_buffer), exit_code=exit_code, - status=CommandStatus.FINISHED if exit_code == 0 else CommandStatus.FAILED, + status=( + CommandStatus.FINISHED + if exit_code == 0 + else CommandStatus.FAILED + ), 
duration=time.time() - start_time, command=command, ) @@ -303,26 +313,27 @@ def stream_command(): duration=time.time() - start_time, command=command, ) - + # Run in executor to avoid blocking loop = asyncio.get_running_loop() - + try: client = self._get_client() response = await loop.run_in_executor( - None, - lambda: client.run(cmd=command, cwd=cwd, env=env) + None, lambda: client.run(cmd=command, cwd=cwd, env=env) ) - - stdout = response.get('stdout', '') - stderr = response.get('stderr', '') - exit_code = response.get('exit_code', 0) - + + stdout = response.get("stdout", "") + stderr = response.get("stderr", "") + exit_code = response.get("exit_code", 0) + return CommandResult( stdout=stdout, stderr=stderr, exit_code=exit_code, - status=CommandStatus.FINISHED if exit_code == 0 else CommandStatus.FAILED, + status=( + CommandStatus.FINISHED if exit_code == 0 else CommandStatus.FAILED + ), duration=time.time() - start_time, command=command, ) diff --git a/koyeb/sandbox/executor_client.py b/koyeb/sandbox/executor_client.py index 8668b53c..ca0d7ab0 100644 --- a/koyeb/sandbox/executor_client.py +++ b/koyeb/sandbox/executor_client.py @@ -4,69 +4,70 @@ A simple Python client for interacting with the Sandbox Executor API. """ -import requests import time -from typing import Optional, Dict, List, Any, Iterator +from typing import Any, Dict, Iterator, Optional + +import requests class SandboxClient: """Client for the Sandbox Executor API.""" - + def __init__(self, base_url: str, secret: str): """ Initialize the Sandbox Client. 
- + Args: base_url: The base URL of the sandbox server (e.g., 'http://localhost:8080') secret: The authentication secret/token """ - self.base_url = base_url.rstrip('/') + self.base_url = base_url.rstrip("/") self.secret = secret self.headers = { - 'Authorization': f'Bearer {secret}', - 'Content-Type': 'application/json' + "Authorization": f"Bearer {secret}", + "Content-Type": "application/json", } - + def _request_with_retry( self, method: str, url: str, max_retries: int = 3, initial_backoff: float = 1.0, - **kwargs + **kwargs, ) -> requests.Response: """ Make an HTTP request with retry logic for 503 errors. - + Args: method: HTTP method (e.g., 'GET', 'POST') url: The URL to request max_retries: Maximum number of retry attempts initial_backoff: Initial backoff time in seconds (doubles each retry) **kwargs: Additional arguments to pass to requests - + Returns: Response object - + Raises: requests.HTTPError: If the request fails after all retries """ backoff = initial_backoff last_exception = None - + for attempt in range(max_retries + 1): try: response = requests.request(method, url, **kwargs) - + # If we get a 503, retry with backoff if response.status_code == 503 and attempt < max_retries: time.sleep(backoff) backoff *= 2 # Exponential backoff continue - + response.raise_for_status() return response - + except requests.HTTPError as e: if e.response.status_code == 503 and attempt < max_retries: time.sleep(backoff) @@ -74,85 +75,76 @@ def _request_with_retry( last_exception = e continue raise - + # If we exhausted all retries, raise the last exception if last_exception: raise last_exception - + def health(self) -> Dict[str, str]: """ Check the health status of the server. 
- + Returns: Dict with status information """ - response = requests.get(f'{self.base_url}/health') + response = requests.get(f"{self.base_url}/health") response.raise_for_status() if response.status_code != 200: - return {'status': 'unhealthy'} + return {"status": "unhealthy"} return response.json() - + def run( - self, - cmd: str, - cwd: Optional[str] = None, - env: Optional[Dict[str, str]] = None + self, cmd: str, cwd: Optional[str] = None, env: Optional[Dict[str, str]] = None ) -> Dict[str, Any]: """ Execute a shell command in the sandbox. - + Args: cmd: The shell command to execute cwd: Optional working directory for command execution env: Optional environment variables to set/override - + Returns: Dict containing stdout, stderr, error (if any), and exit code """ - payload = {'cmd': cmd} + payload = {"cmd": cmd} if cwd is not None: - payload['cwd'] = cwd + payload["cwd"] = cwd if env is not None: - payload['env'] = env - + payload["env"] = env + response = self._request_with_retry( - 'POST', - f'{self.base_url}/run', - json=payload, - headers=self.headers + "POST", f"{self.base_url}/run", json=payload, headers=self.headers ) return response.json() - + def run_streaming( - self, - cmd: str, - cwd: Optional[str] = None, - env: Optional[Dict[str, str]] = None + self, cmd: str, cwd: Optional[str] = None, env: Optional[Dict[str, str]] = None ) -> Iterator[Dict[str, Any]]: """ Execute a shell command in the sandbox and stream the output in real-time. - + This method uses Server-Sent Events (SSE) to stream command output line-by-line as it's produced. Use this for long-running commands where you want real-time output. For simple commands where buffered output is acceptable, use run() instead. 
- + Args: cmd: The shell command to execute cwd: Optional working directory for command execution env: Optional environment variables to set/override - + Yields: Dict events with the following types: - + - output events (as command produces output): {"stream": "stdout"|"stderr", "data": "line of output"} - + - complete event (when command finishes): {"code": , "error": false} - + - error event (if command fails to start): {"error": "error message"} - + Example: >>> client = SandboxClient("http://localhost:8080", "secret") >>> for event in client.run_streaming("echo 'Hello'; sleep 1; echo 'World'"): @@ -162,30 +154,27 @@ def run_streaming( ... print(f"Exit code: {event['code']}") """ import json - - payload = {'cmd': cmd} + + payload = {"cmd": cmd} if cwd is not None: - payload['cwd'] = cwd + payload["cwd"] = cwd if env is not None: - payload['env'] = env - + payload["env"] = env + response = requests.post( - f'{self.base_url}/run_streaming', + f"{self.base_url}/run_streaming", json=payload, headers=self.headers, - stream=True + stream=True, ) response.raise_for_status() - + # Parse Server-Sent Events stream - event_type = None for line in response.iter_lines(decode_unicode=True): if not line: continue - - if line.startswith('event:'): - event_type = line[6:].strip() - elif line.startswith('data:'): + + if line.startswith("data:"): data = line[5:].strip() try: event_data = json.loads(data) @@ -193,123 +182,100 @@ def run_streaming( except json.JSONDecodeError: # If we can't parse the JSON, yield the raw data yield {"error": f"Failed to parse event data: {data}"} - event_type = None - + def write_file(self, path: str, content: str) -> Dict[str, Any]: """ Write content to a file. 
- + Args: path: The file path to write to content: The content to write - + Returns: Dict with success status and error if any """ - payload = { - 'path': path, - 'content': content - } + payload = {"path": path, "content": content} response = self._request_with_retry( - 'POST', - f'{self.base_url}/write_file', - json=payload, - headers=self.headers + "POST", f"{self.base_url}/write_file", json=payload, headers=self.headers ) return response.json() - + def read_file(self, path: str) -> Dict[str, Any]: """ Read content from a file. - + Args: path: The file path to read from - + Returns: Dict with file content and error if any """ - payload = {'path': path} + payload = {"path": path} response = self._request_with_retry( - 'POST', - f'{self.base_url}/read_file', - json=payload, - headers=self.headers + "POST", f"{self.base_url}/read_file", json=payload, headers=self.headers ) return response.json() - + def delete_file(self, path: str) -> Dict[str, Any]: """ Delete a file. - + Args: path: The file path to delete - + Returns: Dict with success status and error if any """ - payload = {'path': path} + payload = {"path": path} response = self._request_with_retry( - 'POST', - f'{self.base_url}/delete_file', - json=payload, - headers=self.headers + "POST", f"{self.base_url}/delete_file", json=payload, headers=self.headers ) return response.json() - + def make_dir(self, path: str) -> Dict[str, Any]: """ Create a directory (including parent directories). - + Args: path: The directory path to create - + Returns: Dict with success status and error if any """ - payload = {'path': path} + payload = {"path": path} response = self._request_with_retry( - 'POST', - f'{self.base_url}/make_dir', - json=payload, - headers=self.headers + "POST", f"{self.base_url}/make_dir", json=payload, headers=self.headers ) return response.json() - + def delete_dir(self, path: str) -> Dict[str, Any]: """ Recursively delete a directory and all its contents. 
- + Args: path: The directory path to delete - + Returns: Dict with success status and error if any """ - payload = {'path': path} + payload = {"path": path} response = self._request_with_retry( - 'POST', - f'{self.base_url}/delete_dir', - json=payload, - headers=self.headers + "POST", f"{self.base_url}/delete_dir", json=payload, headers=self.headers ) return response.json() - + def list_dir(self, path: str) -> Dict[str, Any]: """ List the contents of a directory. - + Args: path: The directory path to list - + Returns: Dict with entries list and error if any """ - payload = {'path': path} + payload = {"path": path} response = self._request_with_retry( - 'POST', - f'{self.base_url}/list_dir', - json=payload, - headers=self.headers + "POST", f"{self.base_url}/list_dir", json=payload, headers=self.headers ) return response.json() - diff --git a/koyeb/sandbox/filesystem.py b/koyeb/sandbox/filesystem.py index 2fa78c8e..fabcbbb1 100644 --- a/koyeb/sandbox/filesystem.py +++ b/koyeb/sandbox/filesystem.py @@ -6,7 +6,6 @@ """ import asyncio -import base64 import os from dataclasses import dataclass from typing import Dict, List, Union @@ -39,13 +38,14 @@ class SandboxFilesystem: """ Synchronous filesystem operations for Koyeb Sandbox instances. Using SandboxClient HTTP API. - + For async usage, use AsyncSandboxFilesystem instead. 
""" def __init__(self, sandbox): self.sandbox = sandbox self._client = None + self._executor = None def _get_client(self) -> SandboxClient: """Get or create SandboxClient instance""" @@ -58,6 +58,14 @@ def _get_client(self) -> SandboxClient: self._client = SandboxClient(sandbox_url, self.sandbox.sandbox_secret) return self._client + def _get_executor(self): + """Get or create SandboxExecutor instance""" + if self._executor is None: + from .exec import SandboxExecutor + + self._executor = SandboxExecutor(self.sandbox) + return self._executor + def write_file( self, path: str, content: Union[str, bytes], encoding: str = "utf-8" ) -> None: @@ -70,12 +78,12 @@ def write_file( encoding: File encoding (default: "utf-8"). Use "base64" for binary data. """ client = self._get_client() - + if isinstance(content, bytes): content_str = content.decode("utf-8") else: content_str = content - + try: client.write_file(path, content_str) except Exception as e: @@ -93,10 +101,10 @@ def read_file(self, path: str, encoding: str = "utf-8") -> FileInfo: FileInfo: Object with content and encoding """ client = self._get_client() - + try: response = client.read_file(path) - content = response.get('content', '') + content = response.get("content", "") return FileInfo(content=content, encoding=encoding) except Exception as e: error_msg = str(e) @@ -110,10 +118,10 @@ def mkdir(self, path: str, recursive: bool = False) -> None: Args: path: Absolute path to the directory - recursive: Create parent directories if needed (default: False) + recursive: Create parent directories if needed (default: False, not used - API always creates parents) """ client = self._get_client() - + try: client.make_dir(path) except Exception as e: @@ -133,10 +141,10 @@ def list_dir(self, path: str = ".") -> List[str]: List[str]: Names of files and directories within the specified path. 
""" client = self._get_client() - + try: response = client.list_dir(path) - entries = response.get('entries', []) + entries = response.get("entries", []) return entries except Exception as e: error_msg = str(e) @@ -152,7 +160,7 @@ def delete_file(self, path: str) -> None: path: Absolute path to the file """ client = self._get_client() - + try: client.delete_file(path) except Exception as e: @@ -169,7 +177,7 @@ def delete_dir(self, path: str) -> None: path: Absolute path to the directory """ client = self._get_client() - + try: client.delete_dir(path) except Exception as e: @@ -189,10 +197,9 @@ def rename_file(self, old_path: str, new_path: str) -> None: new_path: New file path """ # Use exec since there's no direct rename in SandboxClient - from .exec import SandboxExecutor - executor = SandboxExecutor(self.sandbox) + executor = self._get_executor() result = executor(f"mv {old_path} {new_path}") - + if not result.success: if "No such file" in result.stderr: raise FileNotFoundError(f"File not found: {old_path}") @@ -207,10 +214,9 @@ def move_file(self, source_path: str, destination_path: str) -> None: destination_path: Destination path """ # Use exec since there's no direct move in SandboxClient - from .exec import SandboxExecutor - executor = SandboxExecutor(self.sandbox) + executor = self._get_executor() result = executor(f"mv {source_path} {destination_path}") - + if not result.success: if "No such file" in result.stderr: raise FileNotFoundError(f"File not found: {source_path}") @@ -231,22 +237,19 @@ def write_files(self, files: List[Dict[str, str]]) -> None: def exists(self, path: str) -> bool: """Check if file/directory exists synchronously""" - from .exec import SandboxExecutor - executor = SandboxExecutor(self.sandbox) + executor = self._get_executor() result = executor(f"test -e {path}") return result.success def is_file(self, path: str) -> bool: """Check if path is a file synchronously""" - from .exec import SandboxExecutor - executor = 
SandboxExecutor(self.sandbox) + executor = self._get_executor() result = executor(f"test -f {path}") return result.success def is_dir(self, path: str) -> bool: """Check if path is a directory synchronously""" - from .exec import SandboxExecutor - executor = SandboxExecutor(self.sandbox) + executor = self._get_executor() result = executor(f"test -d {path}") return result.success @@ -263,7 +266,7 @@ def upload_file(self, local_path: str, remote_path: str) -> None: with open(local_path, "rb") as f: content = f.read().decode("utf-8") - + self.write_file(remote_path, content) def download_file(self, remote_path: str, local_path: str) -> None: @@ -276,7 +279,7 @@ def download_file(self, remote_path: str, local_path: str) -> None: """ file_info = self.read_file(remote_path) content = file_info.content.encode("utf-8") - + with open(local_path, "wb") as f: f.write(content) @@ -300,9 +303,8 @@ def rm(self, path: str, recursive: bool = False) -> None: path: Path to remove recursive: Remove recursively """ - from .exec import SandboxExecutor - executor = SandboxExecutor(self.sandbox) - + executor = self._get_executor() + if recursive: result = executor(f"rm -rf {path}") else: @@ -345,7 +347,12 @@ async def write_file( encoding: File encoding (default: "utf-8"). Use "base64" for binary data. 
""" loop = asyncio.get_running_loop() - await loop.run_in_executor(None, self.write_file, path, content, encoding) + await loop.run_in_executor( + None, + lambda: super(AsyncSandboxFilesystem, self).write_file( + path, content, encoding + ), + ) async def read_file(self, path: str, encoding: str = "utf-8") -> FileInfo: """ @@ -360,8 +367,7 @@ async def read_file(self, path: str, encoding: str = "utf-8") -> FileInfo: """ loop = asyncio.get_running_loop() return await loop.run_in_executor( - None, - lambda: super(AsyncSandboxFilesystem, self).read_file(path, encoding) + None, lambda: super(AsyncSandboxFilesystem, self).read_file(path, encoding) ) async def mkdir(self, path: str, recursive: bool = False) -> None: @@ -370,12 +376,11 @@ async def mkdir(self, path: str, recursive: bool = False) -> None: Args: path: Absolute path to the directory - recursive: Create parent directories if needed (default: False) + recursive: Create parent directories if needed (default: False, not used - API always creates parents) """ loop = asyncio.get_running_loop() await loop.run_in_executor( - None, - lambda: super(AsyncSandboxFilesystem, self).mkdir(path, recursive) + None, lambda: super(AsyncSandboxFilesystem, self).mkdir(path, recursive) ) async def list_dir(self, path: str = ".") -> List[str]: @@ -390,8 +395,7 @@ async def list_dir(self, path: str = ".") -> List[str]: """ loop = asyncio.get_running_loop() return await loop.run_in_executor( - None, - lambda: super(AsyncSandboxFilesystem, self).list_dir(path) + None, lambda: super(AsyncSandboxFilesystem, self).list_dir(path) ) async def delete_file(self, path: str) -> None: @@ -403,8 +407,7 @@ async def delete_file(self, path: str) -> None: """ loop = asyncio.get_running_loop() await loop.run_in_executor( - None, - lambda: super(AsyncSandboxFilesystem, self).delete_file(path) + None, lambda: super(AsyncSandboxFilesystem, self).delete_file(path) ) async def delete_dir(self, path: str) -> None: @@ -416,8 +419,7 @@ async def 
delete_dir(self, path: str) -> None: """ loop = asyncio.get_running_loop() await loop.run_in_executor( - None, - lambda: super(AsyncSandboxFilesystem, self).delete_dir(path) + None, lambda: super(AsyncSandboxFilesystem, self).delete_dir(path) ) async def rename_file(self, old_path: str, new_path: str) -> None: @@ -430,8 +432,8 @@ async def rename_file(self, old_path: str, new_path: str) -> None: """ loop = asyncio.get_running_loop() await loop.run_in_executor( - None, - lambda: super(AsyncSandboxFilesystem, self).rename_file(old_path, new_path) + None, + lambda: super(AsyncSandboxFilesystem, self).rename_file(old_path, new_path), ) async def move_file(self, source_path: str, destination_path: str) -> None: @@ -444,8 +446,10 @@ async def move_file(self, source_path: str, destination_path: str) -> None: """ loop = asyncio.get_running_loop() await loop.run_in_executor( - None, - lambda: super(AsyncSandboxFilesystem, self).move_file(source_path, destination_path) + None, + lambda: super(AsyncSandboxFilesystem, self).move_file( + source_path, destination_path + ), ) async def write_files(self, files: List[Dict[str, str]]) -> None: @@ -465,24 +469,21 @@ async def exists(self, path: str) -> bool: """Check if file/directory exists asynchronously""" loop = asyncio.get_running_loop() return await loop.run_in_executor( - None, - lambda: super(AsyncSandboxFilesystem, self).exists(path) + None, lambda: super(AsyncSandboxFilesystem, self).exists(path) ) async def is_file(self, path: str) -> bool: """Check if path is a file asynchronously""" loop = asyncio.get_running_loop() return await loop.run_in_executor( - None, - lambda: super(AsyncSandboxFilesystem, self).is_file(path) + None, lambda: super(AsyncSandboxFilesystem, self).is_file(path) ) async def is_dir(self, path: str) -> bool: """Check if path is a directory asynchronously""" loop = asyncio.get_running_loop() return await loop.run_in_executor( - None, - lambda: super(AsyncSandboxFilesystem, self).is_dir(path) + None, 
lambda: super(AsyncSandboxFilesystem, self).is_dir(path) ) async def upload_file(self, local_path: str, remote_path: str) -> None: @@ -495,8 +496,10 @@ async def upload_file(self, local_path: str, remote_path: str) -> None: """ loop = asyncio.get_running_loop() await loop.run_in_executor( - None, - lambda: super(AsyncSandboxFilesystem, self).upload_file(local_path, remote_path) + None, + lambda: super(AsyncSandboxFilesystem, self).upload_file( + local_path, remote_path + ), ) async def download_file(self, remote_path: str, local_path: str) -> None: @@ -509,8 +512,10 @@ async def download_file(self, remote_path: str, local_path: str) -> None: """ loop = asyncio.get_running_loop() await loop.run_in_executor( - None, - lambda: super(AsyncSandboxFilesystem, self).download_file(remote_path, local_path) + None, + lambda: super(AsyncSandboxFilesystem, self).download_file( + remote_path, local_path + ), ) async def ls(self, path: str = ".") -> List[str]: @@ -535,8 +540,7 @@ async def rm(self, path: str, recursive: bool = False) -> None: """ loop = asyncio.get_running_loop() await loop.run_in_executor( - None, - lambda: super(AsyncSandboxFilesystem, self).rm(path, recursive) + None, lambda: super(AsyncSandboxFilesystem, self).rm(path, recursive) ) def open(self, path: str, mode: str = "r") -> "AsyncSandboxFileIO": diff --git a/koyeb/sandbox/sandbox.py b/koyeb/sandbox/sandbox.py index 985fabf8..0ee9ae7c 100644 --- a/koyeb/sandbox/sandbox.py +++ b/koyeb/sandbox/sandbox.py @@ -16,10 +16,10 @@ build_env_vars, create_deployment_definition, create_docker_source, + create_koyeb_sandbox_ports, + create_koyeb_sandbox_routes, get_api_client, is_sandbox_healthy, - create_koyeb_sandbox_ports, - create_koyeb_sandbox_routes ) @@ -129,7 +129,7 @@ def _create_sync( # Generate secure sandbox secret sandbox_secret = secrets.token_urlsafe(32) - + # Add SANDBOX_SECRET to environment variables if env is None: env = {} @@ -220,12 +220,12 @@ def wait_ready(self, timeout: int = 60, poll_interval: 
float = 2.0) -> bool: if sandbox_url is None: time.sleep(poll_interval) continue - + is_healthy = is_sandbox_healthy( - self.instance_id, + self.instance_id, sandbox_url=sandbox_url, sandbox_secret=self.sandbox_secret, - api_token=self.api_token + api_token=self.api_token, ) if is_healthy: @@ -245,12 +245,13 @@ def get_sandbox_url(self) -> Optional[str]: """ Get the public URL of the sandbox. Caches the URL after first retrieval. - + Returns: Optional[str]: The sandbox URL or None if unavailable """ if self._sandbox_url is None: from .utils import get_sandbox_url + self._sandbox_url = get_sandbox_url(self.service_id, self.api_token) return self._sandbox_url @@ -265,10 +266,10 @@ def is_healthy(self) -> bool: """Check if sandbox is healthy and ready for operations""" sandbox_url = self.get_sandbox_url() return is_sandbox_healthy( - self.instance_id, + self.instance_id, sandbox_url=sandbox_url, sandbox_secret=self.sandbox_secret, - api_token=self.api_token + api_token=self.api_token, ) @property @@ -378,9 +379,7 @@ async def wait_ready(self, timeout: int = 60, poll_interval: float = 2.0) -> boo while time.time() - start_time < timeout: loop = asyncio.get_running_loop() - is_healthy = await loop.run_in_executor( - None, super().is_healthy - ) + is_healthy = await loop.run_in_executor(None, super().is_healthy) if is_healthy: return True @@ -397,17 +396,13 @@ async def delete(self) -> None: async def status(self) -> str: """Get current sandbox status asynchronously""" loop = asyncio.get_running_loop() - status_value = await loop.run_in_executor( - None, super().status - ) + status_value = await loop.run_in_executor(None, super().status) return status_value async def is_healthy(self) -> bool: """Check if sandbox is healthy and ready for operations asynchronously""" loop = asyncio.get_running_loop() - return await loop.run_in_executor( - None, super().is_healthy - ) + return await loop.run_in_executor(None, super().is_healthy) @property def exec(self): @@ -422,4 +417,3 @@ 
def filesystem(self): from .filesystem import AsyncSandboxFilesystem return AsyncSandboxFilesystem(self) - From acbd8279612bd49de6dea0810ef87111f49f5b7a Mon Sep 17 00:00:00 2001 From: Edouard Bonlieu Date: Tue, 4 Nov 2025 16:13:40 +0100 Subject: [PATCH 23/47] cleanup / refactor --- koyeb/sandbox/exec.py | 9 ++++- koyeb/sandbox/executor_client.py | 3 +- koyeb/sandbox/filesystem.py | 16 +++++--- koyeb/sandbox/sandbox.py | 27 +++++++------ koyeb/sandbox/utils.py | 66 ++++++++++++-------------------- 5 files changed, 59 insertions(+), 62 deletions(-) diff --git a/koyeb/sandbox/exec.py b/koyeb/sandbox/exec.py index ea940435..9468b4be 100644 --- a/koyeb/sandbox/exec.py +++ b/koyeb/sandbox/exec.py @@ -5,15 +5,20 @@ Using SandboxClient HTTP API """ +from __future__ import annotations + import asyncio import time from dataclasses import dataclass from enum import Enum -from typing import Callable, Dict, List, Optional +from typing import TYPE_CHECKING, Callable, Dict, List, Optional from .executor_client import SandboxClient from .utils import SandboxError +if TYPE_CHECKING: + from .sandbox import Sandbox + class CommandStatus(str, Enum): """Command execution status""" @@ -62,7 +67,7 @@ class SandboxExecutor: For async usage, use AsyncSandboxExecutor instead. """ - def __init__(self, sandbox): + def __init__(self, sandbox: Sandbox) -> None: self.sandbox = sandbox self._client = None diff --git a/koyeb/sandbox/executor_client.py b/koyeb/sandbox/executor_client.py index ca0d7ab0..b0731866 100644 --- a/koyeb/sandbox/executor_client.py +++ b/koyeb/sandbox/executor_client.py @@ -4,6 +4,7 @@ A simple Python client for interacting with the Sandbox Executor API. """ +import json import time from typing import Any, Dict, Iterator, Optional @@ -153,8 +154,6 @@ def run_streaming( ... elif "code" in event: ... 
print(f"Exit code: {event['code']}") """ - import json - payload = {"cmd": cmd} if cwd is not None: payload["cwd"] = cwd diff --git a/koyeb/sandbox/filesystem.py b/koyeb/sandbox/filesystem.py index fabcbbb1..8eefa4af 100644 --- a/koyeb/sandbox/filesystem.py +++ b/koyeb/sandbox/filesystem.py @@ -5,14 +5,20 @@ Using SandboxClient HTTP API """ +from __future__ import annotations + import asyncio import os from dataclasses import dataclass -from typing import Dict, List, Union +from typing import TYPE_CHECKING, Dict, List, Union from .executor_client import SandboxClient from .utils import SandboxError +if TYPE_CHECKING: + from .exec import SandboxExecutor + from .sandbox import Sandbox + class SandboxFilesystemError(SandboxError): """Base exception for filesystem operations""" @@ -42,7 +48,7 @@ class SandboxFilesystem: For async usage, use AsyncSandboxFilesystem instead. """ - def __init__(self, sandbox): + def __init__(self, sandbox: Sandbox) -> None: self.sandbox = sandbox self._client = None self._executor = None @@ -58,7 +64,7 @@ def _get_client(self) -> SandboxClient: self._client = SandboxClient(sandbox_url, self.sandbox.sandbox_secret) return self._client - def _get_executor(self): + def _get_executor(self) -> SandboxExecutor: """Get or create SandboxExecutor instance""" if self._executor is None: from .exec import SandboxExecutor @@ -315,7 +321,7 @@ def rm(self, path: str, recursive: bool = False) -> None: raise FileNotFoundError(f"File not found: {path}") raise SandboxFilesystemError(f"Failed to remove: {result.stderr}") - def open(self, path: str, mode: str = "r") -> "SandboxFileIO": + def open(self, path: str, mode: str = "r") -> SandboxFileIO: """ Open a file in the sandbox synchronously. 
@@ -543,7 +549,7 @@ async def rm(self, path: str, recursive: bool = False) -> None: None, lambda: super(AsyncSandboxFilesystem, self).rm(path, recursive) ) - def open(self, path: str, mode: str = "r") -> "AsyncSandboxFileIO": + def open(self, path: str, mode: str = "r") -> AsyncSandboxFileIO: """ Open a file in the sandbox asynchronously. diff --git a/koyeb/sandbox/sandbox.py b/koyeb/sandbox/sandbox.py index 0ee9ae7c..dd7bf72a 100644 --- a/koyeb/sandbox/sandbox.py +++ b/koyeb/sandbox/sandbox.py @@ -4,10 +4,13 @@ Koyeb Sandbox - Python SDK for creating and managing Koyeb sandboxes """ +from __future__ import annotations + import asyncio +import os import secrets import time -from typing import Dict, List, Optional +from typing import TYPE_CHECKING, Dict, List, Optional from koyeb.api.models.create_app import CreateApp from koyeb.api.models.deployment_port import DeploymentPort @@ -22,6 +25,10 @@ is_sandbox_healthy, ) +if TYPE_CHECKING: + from .exec import AsyncSandboxExecutor, SandboxExecutor + from .filesystem import AsyncSandboxFilesystem, SandboxFilesystem + class Sandbox: """ @@ -61,7 +68,7 @@ def create( regions: Optional[List[str]] = None, api_token: Optional[str] = None, timeout: int = 300, - ) -> "Sandbox": + ) -> Sandbox: """ Create a new sandbox instance. @@ -80,8 +87,6 @@ def create( Sandbox: A new Sandbox instance """ if api_token is None: - import os - api_token = os.getenv("KOYEB_API_TOKEN") if not api_token: raise ValueError( @@ -115,7 +120,7 @@ def _create_sync( regions: Optional[List[str]] = None, api_token: Optional[str] = None, timeout: int = 300, - ) -> "Sandbox": + ) -> Sandbox: """ Synchronous creation method that returns creation parameters. Subclasses can override to return their own type. 
@@ -273,14 +278,14 @@ def is_healthy(self) -> bool: ) @property - def filesystem(self): + def filesystem(self) -> SandboxFilesystem: """Get filesystem operations interface""" from .filesystem import SandboxFilesystem return SandboxFilesystem(self) @property - def exec(self): + def exec(self) -> SandboxExecutor: """Get command execution interface""" from .exec import SandboxExecutor @@ -305,7 +310,7 @@ async def create( regions: Optional[List[str]] = None, api_token: Optional[str] = None, timeout: int = 300, - ) -> "AsyncSandbox": + ) -> AsyncSandbox: """ Create a new sandbox instance with async support. @@ -324,8 +329,6 @@ async def create( AsyncSandbox: A new AsyncSandbox instance """ if api_token is None: - import os - api_token = os.getenv("KOYEB_API_TOKEN") if not api_token: raise ValueError( @@ -405,14 +408,14 @@ async def is_healthy(self) -> bool: return await loop.run_in_executor(None, super().is_healthy) @property - def exec(self): + def exec(self) -> AsyncSandboxExecutor: """Get async command execution interface""" from .exec import AsyncSandboxExecutor return AsyncSandboxExecutor(self) @property - def filesystem(self): + def filesystem(self) -> AsyncSandboxFilesystem: """Get filesystem operations interface""" from .filesystem import AsyncSandboxFilesystem diff --git a/koyeb/sandbox/utils.py b/koyeb/sandbox/utils.py index f15106e2..33f8f534 100644 --- a/koyeb/sandbox/utils.py +++ b/koyeb/sandbox/utils.py @@ -92,11 +92,11 @@ def create_docker_source(image: str, command_args: List[str]) -> DockerSource: def create_koyeb_sandbox_ports() -> List[DeploymentPort]: """ Create port configuration for koyeb/sandbox image. 
- + Creates two ports: - Port 3030 exposed on HTTP, mounted on /koyeb-sandbox/ - Port 3031 exposed on HTTP, mounted on / - + Returns: List of DeploymentPort objects configured for koyeb/sandbox """ @@ -108,30 +108,24 @@ def create_koyeb_sandbox_ports() -> List[DeploymentPort]: DeploymentPort( port=3031, protocol="http", - ) + ), ] def create_koyeb_sandbox_routes() -> List[DeploymentRoute]: """ Create route configuration for koyeb/sandbox image to make it publicly accessible. - + Creates two routes: - Port 3030 accessible at /koyeb-sandbox/ - Port 3031 accessible at / - + Returns: List of DeploymentRoute objects configured for koyeb/sandbox """ return [ - DeploymentRoute( - port=3030, - path="/koyeb-sandbox/" - ), - DeploymentRoute( - port=3031, - path="/" - ) + DeploymentRoute(port=3030, path="/koyeb-sandbox/"), + DeploymentRoute(port=3031, path="/"), ] @@ -194,23 +188,23 @@ def get_sandbox_status( def get_sandbox_url(service_id: str, api_token: Optional[str] = None) -> Optional[str]: """ Get the public URL of a sandbox service. - + Returns the URL with /koyeb-sandbox path prepended since the sandbox executor API is exposed on port 3030 which is mounted at /koyeb-sandbox/. 
""" try: _, services_api, _ = get_api_client(api_token) service_response = services_api.get_service(service_id) - + # Get the service app URL (this would be like: app-name-org.koyeb.app) # The URL is typically constructed from the app name and organization service = service_response.service - + if service.app_id: apps_api, _, _ = get_api_client(api_token) app_response = apps_api.get_app(service.app_id) app = app_response.app - if hasattr(app, 'domains') and app.domains: + if hasattr(app, "domains") and app.domains: # Use the first public domain return f"https://{app.domains[0].name}/koyeb-sandbox" return None @@ -219,27 +213,27 @@ def get_sandbox_url(service_id: str, api_token: Optional[str] = None) -> Optiona def is_sandbox_healthy( - instance_id: str, + instance_id: str, sandbox_url: str, sandbox_secret: str, - api_token: Optional[str] = None + api_token: Optional[str] = None, ) -> bool: """ Check if sandbox is healthy and ready for operations. - + This function requires both sandbox_url and sandbox_secret to properly check: 1. The Koyeb instance status (via API) - using instance_id and api_token 2. 
The sandbox executor health endpoint (via SandboxClient) - using sandbox_url and sandbox_secret - + Args: instance_id: The Koyeb instance ID api_token: Koyeb API token sandbox_url: URL of the sandbox executor API (required) sandbox_secret: Secret for sandbox executor authentication (required) - + Returns: bool: True if sandbox is healthy, False otherwise - + Raises: ValueError: If sandbox_url or sandbox_secret are not provided """ @@ -247,14 +241,16 @@ def is_sandbox_healthy( raise ValueError("sandbox_url is required for health check") if not sandbox_secret: raise ValueError("sandbox_secret is required for health check") - + # Check Koyeb instance status - instance_healthy = get_sandbox_status(instance_id, api_token) == InstanceStatus.HEALTHY - + instance_healthy = ( + get_sandbox_status(instance_id, api_token) == InstanceStatus.HEALTHY + ) + # If instance is not healthy, no need to check executor if not instance_healthy: return False - + # Check executor health try: client = SandboxClient(sandbox_url, sandbox_secret) @@ -262,25 +258,13 @@ def is_sandbox_healthy( # Check if health response indicates the server is healthy # The exact response format may vary, but typically has a "status" field if isinstance(health_response, dict): - status = health_response.get('status', '').lower() - return status in ['ok', 'healthy', 'ready'] + status = health_response.get("status", "").lower() + return status in ["ok", "healthy", "ready"] return True # If we got a response, consider it healthy except Exception: # If we can't reach the executor API, consider it unhealthy return False -def ensure_sandbox_healthy(instance_id: str, api_token: Optional[str] = None) -> None: - """Ensure a sandbox instance is healthy, raising an exception if not.""" - status = get_sandbox_status(instance_id, api_token) - - if status == InstanceStatus.ERROR: - raise SandboxError("Sandbox is in error state") - elif status in [InstanceStatus.STOPPING, InstanceStatus.STOPPED]: - raise SandboxError(f"Sandbox 
is {status.value}, cannot perform operations") - elif status != InstanceStatus.HEALTHY: - raise SandboxError(f"Sandbox is not healthy (status: {status.value})") - - class SandboxError(Exception): """Base exception for sandbox operations""" From 1ec5de3dbfd31958547fb660ad5a111d41a01dab Mon Sep 17 00:00:00 2001 From: Edouard Bonlieu Date: Tue, 4 Nov 2025 16:41:37 +0100 Subject: [PATCH 24/47] add idle_timeout support --- koyeb/sandbox/executor_client.py | 2 - koyeb/sandbox/sandbox.py | 29 ++++- koyeb/sandbox/utils.py | 206 +++++++++++++++++++++++++++++-- 3 files changed, 224 insertions(+), 13 deletions(-) diff --git a/koyeb/sandbox/executor_client.py b/koyeb/sandbox/executor_client.py index b0731866..0c4f8a03 100644 --- a/koyeb/sandbox/executor_client.py +++ b/koyeb/sandbox/executor_client.py @@ -90,8 +90,6 @@ def health(self) -> Dict[str, str]: """ response = requests.get(f"{self.base_url}/health") response.raise_for_status() - if response.status_code != 200: - return {"status": "unhealthy"} return response.json() def run( diff --git a/koyeb/sandbox/sandbox.py b/koyeb/sandbox/sandbox.py index dd7bf72a..0fe78a7d 100644 --- a/koyeb/sandbox/sandbox.py +++ b/koyeb/sandbox/sandbox.py @@ -16,6 +16,7 @@ from koyeb.api.models.deployment_port import DeploymentPort from .utils import ( + IdleTimeout, build_env_vars, create_deployment_definition, create_docker_source, @@ -68,6 +69,7 @@ def create( regions: Optional[List[str]] = None, api_token: Optional[str] = None, timeout: int = 300, + idle_timeout: Optional[IdleTimeout] = None, ) -> Sandbox: """ Create a new sandbox instance. 
@@ -82,6 +84,11 @@ def create( regions: List of regions to deploy to (default: ["na"]) api_token: Koyeb API token (if None, will try to get from KOYEB_API_TOKEN env var) timeout: Timeout for sandbox creation in seconds + idle_timeout: Idle timeout configuration for scale-to-zero + - None: Auto-enable (light_sleep=300s, deep_sleep=600s) + - 0: Disable scale-to-zero (keep always-on) + - int > 0: Deep sleep only (e.g., 600 for 600s deep sleep) + - dict: Explicit configuration with {"light_sleep": 300, "deep_sleep": 600} Returns: Sandbox: A new Sandbox instance @@ -102,6 +109,7 @@ def create( regions=regions, api_token=api_token, timeout=timeout, + idle_timeout=idle_timeout, ) if wait_ready: @@ -120,12 +128,13 @@ def _create_sync( regions: Optional[List[str]] = None, api_token: Optional[str] = None, timeout: int = 300, + idle_timeout: Optional[IdleTimeout] = None, ) -> Sandbox: """ Synchronous creation method that returns creation parameters. Subclasses can override to return their own type. 
""" - apps_api, services_api, _ = get_api_client(api_token) + apps_api, services_api, _, catalog_instances_api = get_api_client(api_token) # Auto-configure ports for koyeb/sandbox image if not explicitly provided if ports is None: @@ -140,6 +149,13 @@ def _create_sync( env = {} env["SANDBOX_SECRET"] = sandbox_secret + # Check if light sleep is enabled for this instance type + from .utils import _is_light_sleep_enabled + + light_sleep_enabled = _is_light_sleep_enabled( + instance_type, catalog_instances_api + ) + app_name = f"sandbox-app-{name}-{int(time.time())}" app_response = apps_api.create_app(app=CreateApp(name=app_name)) app_id = app_response.app.id @@ -154,6 +170,8 @@ def _create_sync( ports=ports, regions=regions, routes=routes, + idle_timeout=idle_timeout, + light_sleep_enabled=light_sleep_enabled, ) from koyeb.api.models.create_service import CreateService @@ -242,7 +260,7 @@ def wait_ready(self, timeout: int = 60, poll_interval: float = 2.0) -> bool: def delete(self) -> None: """Delete the sandbox instance.""" - apps_api, services_api, _ = get_api_client(self.api_token) + apps_api, services_api, _, _ = get_api_client(self.api_token) services_api.delete_service(self.service_id) apps_api.delete_app(self.app_id) @@ -310,6 +328,7 @@ async def create( regions: Optional[List[str]] = None, api_token: Optional[str] = None, timeout: int = 300, + idle_timeout: Optional[IdleTimeout] = None, ) -> AsyncSandbox: """ Create a new sandbox instance with async support. 
@@ -324,6 +343,11 @@ async def create( regions: List of regions to deploy to (default: ["na"]) api_token: Koyeb API token (if None, will try to get from KOYEB_API_TOKEN env var) timeout: Timeout for sandbox creation in seconds + idle_timeout: Idle timeout configuration for scale-to-zero + - None: Auto-enable (light_sleep=300s, deep_sleep=600s) + - 0: Disable scale-to-zero (keep always-on) + - int > 0: Deep sleep only (e.g., 600 for 600s deep sleep) + - dict: Explicit configuration with {"light_sleep": 300, "deep_sleep": 600} Returns: AsyncSandbox: A new AsyncSandbox instance @@ -347,6 +371,7 @@ async def create( regions=regions, api_token=api_token, timeout=timeout, + idle_timeout=idle_timeout, ), ) diff --git a/koyeb/sandbox/utils.py b/koyeb/sandbox/utils.py index 33f8f534..37d9121a 100644 --- a/koyeb/sandbox/utils.py +++ b/koyeb/sandbox/utils.py @@ -5,10 +5,10 @@ """ import os -from typing import Dict, List, Optional +from typing import Dict, List, Literal, Optional, TypedDict, Union from koyeb.api import ApiClient, Configuration -from koyeb.api.api import AppsApi, InstancesApi, ServicesApi +from koyeb.api.api import AppsApi, CatalogInstancesApi, InstancesApi, ServicesApi from koyeb.api.exceptions import ApiException, NotFoundException from koyeb.api.models.deployment_definition import DeploymentDefinition from koyeb.api.models.deployment_definition_type import DeploymentDefinitionType @@ -17,15 +17,36 @@ from koyeb.api.models.deployment_port import DeploymentPort from koyeb.api.models.deployment_route import DeploymentRoute from koyeb.api.models.deployment_scaling import DeploymentScaling +from koyeb.api.models.deployment_scaling_target import DeploymentScalingTarget +from koyeb.api.models.deployment_scaling_target_sleep_idle_delay import ( + DeploymentScalingTargetSleepIdleDelay, +) from koyeb.api.models.docker_source import DockerSource from koyeb.api.models.instance_status import InstanceStatus from .executor_client import SandboxClient +# Type definitions 
for idle timeout +IdleTimeoutSeconds = int + + +class IdleTimeoutConfig(TypedDict, total=False): + """Configuration for idle timeout with light and deep sleep.""" + + light_sleep: IdleTimeoutSeconds # Optional, but if provided, deep_sleep is required + deep_sleep: IdleTimeoutSeconds # Required + + +IdleTimeout = Union[ + Literal[0], # Disable scale-to-zero + IdleTimeoutSeconds, # Deep sleep only (standard and GPU instances) + IdleTimeoutConfig, # Explicit light_sleep/deep_sleep configuration +] + def get_api_client( api_token: Optional[str] = None, host: Optional[str] = None -) -> tuple[AppsApi, ServicesApi, InstancesApi]: +) -> tuple[AppsApi, ServicesApi, InstancesApi, CatalogInstancesApi]: """ Get configured API clients for Koyeb operations. @@ -34,7 +55,7 @@ def get_api_client( host: Koyeb API host URL. If not provided, will try to get from KOYEB_API_HOST env var (defaults to https://app.koyeb.com) Returns: - Tuple of (AppsApi, ServicesApi, InstancesApi) instances + Tuple of (AppsApi, ServicesApi, InstancesApi, CatalogInstancesApi) instances Raises: ValueError: If API token is not provided @@ -51,7 +72,12 @@ def get_api_client( configuration.api_key_prefix["Bearer"] = "Bearer" api_client = ApiClient(configuration) - return AppsApi(api_client), ServicesApi(api_client), InstancesApi(api_client) + return ( + AppsApi(api_client), + ServicesApi(api_client), + InstancesApi(api_client), + CatalogInstancesApi(api_client), + ) def build_env_vars(env: Optional[Dict[str, str]]) -> List[DeploymentEnv]: @@ -129,6 +155,150 @@ def create_koyeb_sandbox_routes() -> List[DeploymentRoute]: ] +def _validate_idle_timeout(idle_timeout: Optional[IdleTimeout]) -> None: + """ + Validate idle_timeout parameter according to spec. 
+ + Raises: + ValueError: If validation fails + """ + if idle_timeout is None: + return + + if isinstance(idle_timeout, int): + if idle_timeout < 0: + raise ValueError("idle_timeout must be >= 0") + if idle_timeout > 0: + # Deep sleep only - valid + return + # idle_timeout == 0 means disable scale-to-zero - valid + return + + if isinstance(idle_timeout, dict): + if "deep_sleep" not in idle_timeout: + raise ValueError( + "idle_timeout dict must contain 'deep_sleep' key (at minimum)" + ) + + deep_sleep = idle_timeout.get("deep_sleep") + if deep_sleep is None or not isinstance(deep_sleep, int) or deep_sleep <= 0: + raise ValueError("deep_sleep must be a positive integer") + + if "light_sleep" in idle_timeout: + light_sleep = idle_timeout.get("light_sleep") + if ( + light_sleep is None + or not isinstance(light_sleep, int) + or light_sleep <= 0 + ): + raise ValueError("light_sleep must be a positive integer") + + if deep_sleep < light_sleep: + raise ValueError( + "deep_sleep must be >= light_sleep when both are provided" + ) + + +def _is_light_sleep_enabled( + instance_type: str, + catalog_instances_api: Optional[CatalogInstancesApi] = None, +) -> bool: + """ + Check if light sleep is enabled for the instance type using API or fallback. 
+ + Args: + instance_type: Instance type string + catalog_instances_api: Optional CatalogInstancesApi client (if None, will try to create one) + + Returns: + True if light sleep is enabled, False otherwise (defaults to True if API call fails) + """ + try: + if catalog_instances_api is None: + _, _, _, catalog_instances_api = get_api_client(None) + response = catalog_instances_api.get_catalog_instance(id=instance_type) + if response and response.instance: + return response.instance.light_sleep_enabled or False + except (ApiException, NotFoundException): + # If API call fails, default to True (assume light sleep is enabled) + pass + except Exception: + # Any other error, default to True (assume light sleep is enabled) + pass + # Default to True if we can't determine from API + return True + + +def _process_idle_timeout( + idle_timeout: Optional[IdleTimeout], + light_sleep_enabled: bool = True, +) -> Optional[DeploymentScalingTargetSleepIdleDelay]: + """ + Process idle_timeout parameter and convert to DeploymentScalingTargetSleepIdleDelay. 
+ + According to spec: + - If unsupported instance type: idle_timeout is silently ignored (returns None) + - None (default): Auto-enable light_sleep=300s, deep_sleep=600s + - 0: Explicitly disable scale-to-zero (returns None) + - int > 0: Deep sleep only + - dict: Explicit configuration + - If light_sleep_enabled is False for the instance type, light_sleep is ignored + + Args: + idle_timeout: Idle timeout configuration + light_sleep_enabled: Whether light sleep is enabled for the instance type (default: True) + + Returns: + DeploymentScalingTargetSleepIdleDelay or None if disabled/ignored + """ + # Validate the parameter + _validate_idle_timeout(idle_timeout) + + # Process according to spec + if idle_timeout is None: + # Default: Auto-enable light_sleep=300s, deep_sleep=600s + # If light sleep is not enabled, only use deep_sleep + if not light_sleep_enabled: + return DeploymentScalingTargetSleepIdleDelay( + deep_sleep_value=600, + ) + return DeploymentScalingTargetSleepIdleDelay( + light_sleep_value=300, + deep_sleep_value=600, + ) + + if isinstance(idle_timeout, int): + if idle_timeout == 0: + # Explicitly disable scale-to-zero + return None + # Deep sleep only + return DeploymentScalingTargetSleepIdleDelay( + deep_sleep_value=idle_timeout, + ) + + if isinstance(idle_timeout, dict): + deep_sleep = idle_timeout.get("deep_sleep") + light_sleep = idle_timeout.get("light_sleep") + + # If light sleep is not enabled, ignore light_sleep if provided + if not light_sleep_enabled: + return DeploymentScalingTargetSleepIdleDelay( + deep_sleep_value=deep_sleep, + ) + + if light_sleep is not None: + # Both light_sleep and deep_sleep provided + return DeploymentScalingTargetSleepIdleDelay( + light_sleep_value=light_sleep, + deep_sleep_value=deep_sleep, + ) + else: + # Deep sleep only + return DeploymentScalingTargetSleepIdleDelay( + deep_sleep_value=deep_sleep, + ) + + def create_deployment_definition( name: str, docker_source: DockerSource, @@ -137,6 +307,8 @@ def 
create_deployment_definition( ports: Optional[List[DeploymentPort]] = None, regions: List[str] = None, routes: Optional[List[DeploymentRoute]] = None, + idle_timeout: Optional[IdleTimeout] = None, + light_sleep_enabled: bool = True, ) -> DeploymentDefinition: """ Create deployment definition for a sandbox service. @@ -149,6 +321,8 @@ def create_deployment_definition( ports: List of ports (if provided, type becomes WEB, otherwise WORKER) regions: List of regions (defaults to North America) routes: List of routes for public access + idle_timeout: Idle timeout configuration (see IdleTimeout type) + light_sleep_enabled: Whether light sleep is enabled for the instance type (default: True) Returns: DeploymentDefinition object @@ -160,6 +334,20 @@ def create_deployment_definition( DeploymentDefinitionType.WEB if ports else DeploymentDefinitionType.WORKER ) + # Process idle_timeout + sleep_idle_delay = _process_idle_timeout(idle_timeout, light_sleep_enabled) + + # Create scaling configuration + # If idle_timeout is 0, explicitly disable scale-to-zero (min=1, always-on) + # Otherwise (None, int > 0, or dict), enable scale-to-zero (min=0) + min_scale = 1 if idle_timeout == 0 else 0 + targets = None + if sleep_idle_delay is not None: + scaling_target = DeploymentScalingTarget(sleep_idle_delay=sleep_idle_delay) + targets = [scaling_target] + + scalings = [DeploymentScaling(min=min_scale, max=1, targets=targets)] + return DeploymentDefinition( name=name, type=deployment_type, @@ -168,7 +356,7 @@ def create_deployment_definition( ports=ports, routes=routes, instance_types=[DeploymentInstanceType(type=instance_type)], - scalings=[DeploymentScaling(min=1, max=1)], + scalings=scalings, regions=regions, ) @@ -178,7 +366,7 @@ def get_sandbox_status( ) -> InstanceStatus: """Get the current status of a sandbox instance.""" try: - _, _, instances_api = get_api_client(api_token) + _, _, instances_api, _ = get_api_client(api_token) instance_response = 
instances_api.get_instance(instance_id) return instance_response.instance.status except (NotFoundException, ApiException, Exception): @@ -193,7 +381,7 @@ def get_sandbox_url(service_id: str, api_token: Optional[str] = None) -> Optiona executor API is exposed on port 3030 which is mounted at /koyeb-sandbox/. """ try: - _, services_api, _ = get_api_client(api_token) + _, services_api, _, _ = get_api_client(api_token) service_response = services_api.get_service(service_id) # Get the service app URL (this would be like: app-name-org.koyeb.app) @@ -201,7 +389,7 @@ def get_sandbox_url(service_id: str, api_token: Optional[str] = None) -> Optiona service = service_response.service if service.app_id: - apps_api, _, _ = get_api_client(api_token) + apps_api, _, _, _ = get_api_client(api_token) app_response = apps_api.get_app(service.app_id) app = app_response.app if hasattr(app, "domains") and app.domains: From a09db79f8e1f3de1151d1c531152e5e2974c0975 Mon Sep 17 00:00:00 2001 From: Edouard Bonlieu Date: Tue, 4 Nov 2025 19:50:11 +0100 Subject: [PATCH 25/47] add an option to specify the exposed port protocol --- koyeb/sandbox/sandbox.py | 26 ++++++++-------- koyeb/sandbox/utils.py | 65 +++++++++++++++++++++++++++++++++------- 2 files changed, 68 insertions(+), 23 deletions(-) diff --git a/koyeb/sandbox/sandbox.py b/koyeb/sandbox/sandbox.py index 0fe78a7d..224bb897 100644 --- a/koyeb/sandbox/sandbox.py +++ b/koyeb/sandbox/sandbox.py @@ -13,14 +13,12 @@ from typing import TYPE_CHECKING, Dict, List, Optional from koyeb.api.models.create_app import CreateApp -from koyeb.api.models.deployment_port import DeploymentPort from .utils import ( IdleTimeout, build_env_vars, create_deployment_definition, create_docker_source, - create_koyeb_sandbox_ports, create_koyeb_sandbox_routes, get_api_client, is_sandbox_healthy, @@ -64,7 +62,7 @@ def create( name: str = "quick-sandbox", wait_ready: bool = True, instance_type: str = "nano", - ports: Optional[List[DeploymentPort]] = None, + 
exposed_port_protocol: Optional[str] = None, env: Optional[Dict[str, str]] = None, regions: Optional[List[str]] = None, api_token: Optional[str] = None, @@ -79,7 +77,9 @@ def create( name: Name of the sandbox wait_ready: Wait for sandbox to be ready (default: True) instance_type: Instance type (default: nano) - ports: List of ports to expose + exposed_port_protocol: Protocol to expose ports with ("http" or "http2"). + If None, defaults to "http". + If provided, must be one of "http" or "http2". env: Environment variables regions: List of regions to deploy to (default: ["na"]) api_token: Koyeb API token (if None, will try to get from KOYEB_API_TOKEN env var) @@ -104,7 +104,7 @@ def create( name=name, image=image, instance_type=instance_type, - ports=ports, + exposed_port_protocol=exposed_port_protocol, env=env, regions=regions, api_token=api_token, @@ -123,7 +123,7 @@ def _create_sync( name: str, image: str = "koyeb/sandbox", instance_type: str = "nano", - ports: Optional[List[DeploymentPort]] = None, + exposed_port_protocol: Optional[str] = None, env: Optional[Dict[str, str]] = None, regions: Optional[List[str]] = None, api_token: Optional[str] = None, @@ -136,9 +136,7 @@ def _create_sync( """ apps_api, services_api, _, catalog_instances_api = get_api_client(api_token) - # Auto-configure ports for koyeb/sandbox image if not explicitly provided - if ports is None: - ports = create_koyeb_sandbox_ports() + # Always create routes (ports are always exposed, default to "http") routes = create_koyeb_sandbox_routes() # Generate secure sandbox secret @@ -167,7 +165,7 @@ def _create_sync( docker_source=docker_source, env_vars=env_vars, instance_type=instance_type, - ports=ports, + exposed_port_protocol=exposed_port_protocol, regions=regions, routes=routes, idle_timeout=idle_timeout, @@ -323,7 +321,7 @@ async def create( name: str = "quick-sandbox", wait_ready: bool = True, instance_type: str = "nano", - ports: Optional[List[DeploymentPort]] = None, + exposed_port_protocol: 
Optional[str] = None, env: Optional[Dict[str, str]] = None, regions: Optional[List[str]] = None, api_token: Optional[str] = None, @@ -338,7 +336,9 @@ async def create( name: Name of the sandbox wait_ready: Wait for sandbox to be ready (default: True) instance_type: Instance type (default: nano) - ports: List of ports to expose + exposed_port_protocol: Protocol to expose ports with ("http" or "http2"). + If None, defaults to "http". + If provided, must be one of "http" or "http2". env: Environment variables regions: List of regions to deploy to (default: ["na"]) api_token: Koyeb API token (if None, will try to get from KOYEB_API_TOKEN env var) @@ -366,7 +366,7 @@ async def create( name=name, image=image, instance_type=instance_type, - ports=ports, + exposed_port_protocol=exposed_port_protocol, env=env, regions=regions, api_token=api_token, diff --git a/koyeb/sandbox/utils.py b/koyeb/sandbox/utils.py index 37d9121a..d73b25a8 100644 --- a/koyeb/sandbox/utils.py +++ b/koyeb/sandbox/utils.py @@ -43,6 +43,41 @@ class IdleTimeoutConfig(TypedDict, total=False): IdleTimeoutConfig, # Explicit light_sleep/deep_sleep configuration ] +# Valid protocols for DeploymentPort (from OpenAPI spec: http, http2, tcp) +# For sandboxes, we only support http and http2 +VALID_DEPLOYMENT_PORT_PROTOCOLS = ("http", "http2") + + +def _validate_port_protocol(protocol: str) -> str: + """ + Validate port protocol using API model structure. + + Args: + protocol: Protocol string to validate + + Returns: + Validated protocol string + + Raises: + ValueError: If protocol is invalid + """ + # Validate by attempting to create a DeploymentPort instance + # This ensures we're using the API model's validation structure + try: + port = DeploymentPort(port=3030, protocol=protocol) + # Additional validation: check if protocol is in allowed values + if protocol not in VALID_DEPLOYMENT_PORT_PROTOCOLS: + raise ValueError( + f"Invalid protocol '{protocol}'. 
Must be one of {VALID_DEPLOYMENT_PORT_PROTOCOLS}" + ) + return port.protocol or "http" + except Exception as e: + if isinstance(e, ValueError): + raise + raise ValueError( + f"Invalid protocol '{protocol}'. Must be one of {VALID_DEPLOYMENT_PORT_PROTOCOLS}" + ) from e + def get_api_client( api_token: Optional[str] = None, host: Optional[str] = None @@ -115,13 +150,16 @@ def create_docker_source(image: str, command_args: List[str]) -> DockerSource: ) -def create_koyeb_sandbox_ports() -> List[DeploymentPort]: +def create_koyeb_sandbox_ports(protocol: str = "http") -> List[DeploymentPort]: """ Create port configuration for koyeb/sandbox image. Creates two ports: - Port 3030 exposed on HTTP, mounted on /koyeb-sandbox/ - - Port 3031 exposed on HTTP, mounted on / + - Port 3031 exposed with the specified protocol, mounted on / + + Args: + protocol: Protocol to use for port 3031 ("http" or "http2"), defaults to "http" Returns: List of DeploymentPort objects configured for koyeb/sandbox @@ -133,7 +171,7 @@ def create_koyeb_sandbox_ports() -> List[DeploymentPort]: ), DeploymentPort( port=3031, - protocol="http", + protocol=protocol, ), ] @@ -304,7 +342,7 @@ def create_deployment_definition( docker_source: DockerSource, env_vars: List[DeploymentEnv], instance_type: str, - ports: Optional[List[DeploymentPort]] = None, + exposed_port_protocol: Optional[str] = None, regions: List[str] = None, routes: Optional[List[DeploymentRoute]] = None, idle_timeout: Optional[IdleTimeout] = None, @@ -318,8 +356,10 @@ def create_deployment_definition( docker_source: Docker configuration env_vars: Environment variables instance_type: Instance type - ports: List of ports (if provided, type becomes WEB, otherwise WORKER) - regions: List of regions (defaults to North America) + exposed_port_protocol: Protocol to expose ports with ("http" or "http2"). + If None, defaults to "http". + If provided, must be one of "http" or "http2". 
+ regions: List of regions (defaults to ["na"]) routes: List of routes for public access idle_timeout: Idle timeout configuration (see IdleTimeout type) light_sleep_enabled: Whether light sleep is enabled for the instance type (default: True) @@ -328,11 +368,16 @@ def create_deployment_definition( DeploymentDefinition object """ if regions is None: - regions = ["eu"] + regions = ["na"] - deployment_type = ( - DeploymentDefinitionType.WEB if ports else DeploymentDefinitionType.WORKER - ) + # Always create ports with protocol (default to "http" if not specified) + protocol = exposed_port_protocol if exposed_port_protocol is not None else "http" + # Validate protocol using API model structure + protocol = _validate_port_protocol(protocol) + ports = create_koyeb_sandbox_ports(protocol) + + # Always use WEB type + deployment_type = DeploymentDefinitionType.WEB # Process idle_timeout sleep_idle_delay = _process_idle_timeout(idle_timeout, light_sleep_enabled) From a5b43ee54c4fc5ac4949b8c5555582eb3eeee550 Mon Sep 17 00:00:00 2001 From: Edouard Bonlieu Date: Tue, 4 Nov 2025 19:57:35 +0100 Subject: [PATCH 26/47] rename get_sandbox_url to _get_sandbox_url --- koyeb/sandbox/exec.py | 2 +- koyeb/sandbox/filesystem.py | 2 +- koyeb/sandbox/sandbox.py | 8 ++++---- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/koyeb/sandbox/exec.py b/koyeb/sandbox/exec.py index 9468b4be..a3058253 100644 --- a/koyeb/sandbox/exec.py +++ b/koyeb/sandbox/exec.py @@ -74,7 +74,7 @@ def __init__(self, sandbox: Sandbox) -> None: def _get_client(self) -> SandboxClient: """Get or create SandboxClient instance""" if self._client is None: - sandbox_url = self.sandbox.get_sandbox_url() + sandbox_url = self.sandbox._get_sandbox_url() if not sandbox_url: raise SandboxError("Unable to get sandbox URL") if not self.sandbox.sandbox_secret: diff --git a/koyeb/sandbox/filesystem.py b/koyeb/sandbox/filesystem.py index 8eefa4af..6a156736 100644 --- a/koyeb/sandbox/filesystem.py +++ 
b/koyeb/sandbox/filesystem.py @@ -56,7 +56,7 @@ def __init__(self, sandbox: Sandbox) -> None: def _get_client(self) -> SandboxClient: """Get or create SandboxClient instance""" if self._client is None: - sandbox_url = self.sandbox.get_sandbox_url() + sandbox_url = self.sandbox._get_sandbox_url() if not sandbox_url: raise SandboxError("Unable to get sandbox URL") if not self.sandbox.sandbox_secret: diff --git a/koyeb/sandbox/sandbox.py b/koyeb/sandbox/sandbox.py index 224bb897..e017c254 100644 --- a/koyeb/sandbox/sandbox.py +++ b/koyeb/sandbox/sandbox.py @@ -236,7 +236,7 @@ def wait_ready(self, timeout: int = 60, poll_interval: float = 2.0) -> bool: while time.time() - start_time < timeout: # Get sandbox URL on first iteration or if not yet retrieved if sandbox_url is None: - sandbox_url = self.get_sandbox_url() + sandbox_url = self._get_sandbox_url() # If URL is not available yet, wait and retry if sandbox_url is None: time.sleep(poll_interval) @@ -262,9 +262,9 @@ def delete(self) -> None: services_api.delete_service(self.service_id) apps_api.delete_app(self.app_id) - def get_sandbox_url(self) -> Optional[str]: + def _get_sandbox_url(self) -> Optional[str]: """ - Get the public URL of the sandbox. + Internal method to get the sandbox URL for health checks and client initialization. Caches the URL after first retrieval. 
Returns: @@ -285,7 +285,7 @@ def status(self) -> str: def is_healthy(self) -> bool: """Check if sandbox is healthy and ready for operations""" - sandbox_url = self.get_sandbox_url() + sandbox_url = self._get_sandbox_url() return is_sandbox_healthy( self.instance_id, sandbox_url=sandbox_url, From a015965cdc5142fa4fe7ccdcfd5769d9a62ed26d Mon Sep 17 00:00:00 2001 From: Edouard Bonlieu Date: Tue, 4 Nov 2025 20:12:30 +0100 Subject: [PATCH 27/47] add get_domain method to sandbox --- koyeb/sandbox/sandbox.py | 20 +++++++++++++++++--- koyeb/sandbox/utils.py | 21 +++++++++++++-------- 2 files changed, 30 insertions(+), 11 deletions(-) diff --git a/koyeb/sandbox/sandbox.py b/koyeb/sandbox/sandbox.py index e017c254..2f2c02cf 100644 --- a/koyeb/sandbox/sandbox.py +++ b/koyeb/sandbox/sandbox.py @@ -262,6 +262,20 @@ def delete(self) -> None: services_api.delete_service(self.service_id) apps_api.delete_app(self.app_id) + def get_domain(self) -> Optional[str]: + """ + Get the public domain of the sandbox. + + Returns the domain name (e.g., "app-name-org.koyeb.app") without protocol or path. + To construct the URL, use: f"https://{sandbox.get_domain()}" + + Returns: + Optional[str]: The domain name or None if unavailable + """ + from .utils import _get_sandbox_domain + + return _get_sandbox_domain(self.service_id, self.api_token) + def _get_sandbox_url(self) -> Optional[str]: """ Internal method to get the sandbox URL for health checks and client initialization. 
@@ -271,9 +285,9 @@ def _get_sandbox_url(self) -> Optional[str]: Optional[str]: The sandbox URL or None if unavailable """ if self._sandbox_url is None: - from .utils import get_sandbox_url - - self._sandbox_url = get_sandbox_url(self.service_id, self.api_token) + domain = self.get_domain() + if domain: + self._sandbox_url = f"https://{domain}/koyeb-sandbox" return self._sandbox_url def status(self) -> str: diff --git a/koyeb/sandbox/utils.py b/koyeb/sandbox/utils.py index d73b25a8..af5bf31e 100644 --- a/koyeb/sandbox/utils.py +++ b/koyeb/sandbox/utils.py @@ -418,19 +418,24 @@ def get_sandbox_status( return InstanceStatus.ERROR -def get_sandbox_url(service_id: str, api_token: Optional[str] = None) -> Optional[str]: +def _get_sandbox_domain( + service_id: str, api_token: Optional[str] = None +) -> Optional[str]: """ - Get the public URL of a sandbox service. + Internal function to get the public domain of a sandbox service. - Returns the URL with /koyeb-sandbox path prepended since the sandbox - executor API is exposed on port 3030 which is mounted at /koyeb-sandbox/. + Returns the domain name (e.g., "app-name-org.koyeb.app") without protocol or path. 
+ + Args: + service_id: The service ID + api_token: Optional API token (if None, will try to get from KOYEB_API_TOKEN env var) + + Returns: + Optional[str]: The domain name or None if unavailable """ try: _, services_api, _, _ = get_api_client(api_token) service_response = services_api.get_service(service_id) - - # Get the service app URL (this would be like: app-name-org.koyeb.app) - # The URL is typically constructed from the app name and organization service = service_response.service if service.app_id: @@ -439,7 +444,7 @@ def get_sandbox_url(service_id: str, api_token: Optional[str] = None) -> Optiona app = app_response.app if hasattr(app, "domains") and app.domains: # Use the first public domain - return f"https://{app.domains[0].name}/koyeb-sandbox" + return app.domains[0].name return None except (NotFoundException, ApiException, Exception): return None From 5c58d7c51ac76914788dd4487756e7e21acb86c2 Mon Sep 17 00:00:00 2001 From: Edouard Bonlieu Date: Tue, 4 Nov 2025 20:39:37 +0100 Subject: [PATCH 28/47] add tcp proxy support --- koyeb/sandbox/sandbox.py | 135 ++++++++++++++++++++++++++++++++++++++- koyeb/sandbox/utils.py | 60 ++++++++--------- 2 files changed, 161 insertions(+), 34 deletions(-) diff --git a/koyeb/sandbox/sandbox.py b/koyeb/sandbox/sandbox.py index 2f2c02cf..59e0ad9e 100644 --- a/koyeb/sandbox/sandbox.py +++ b/koyeb/sandbox/sandbox.py @@ -68,6 +68,7 @@ def create( api_token: Optional[str] = None, timeout: int = 300, idle_timeout: Optional[IdleTimeout] = None, + enable_tcp_proxy: bool = False, ) -> Sandbox: """ Create a new sandbox instance. 
@@ -89,6 +90,7 @@ def create( - 0: Disable scale-to-zero (keep always-on) - int > 0: Deep sleep only (e.g., 600 for 600s deep sleep) - dict: Explicit configuration with {"light_sleep": 300, "deep_sleep": 600} + enable_tcp_proxy: If True, enables TCP proxy for direct TCP access to port 3031 Returns: Sandbox: A new Sandbox instance @@ -110,6 +112,7 @@ def create( api_token=api_token, timeout=timeout, idle_timeout=idle_timeout, + enable_tcp_proxy=enable_tcp_proxy, ) if wait_ready: @@ -129,6 +132,7 @@ def _create_sync( api_token: Optional[str] = None, timeout: int = 300, idle_timeout: Optional[IdleTimeout] = None, + enable_tcp_proxy: bool = False, ) -> Sandbox: """ Synchronous creation method that returns creation parameters. @@ -170,6 +174,7 @@ def _create_sync( routes=routes, idle_timeout=idle_timeout, light_sleep_enabled=light_sleep_enabled, + enable_tcp_proxy=enable_tcp_proxy, ) from koyeb.api.models.create_service import CreateService @@ -256,6 +261,34 @@ def wait_ready(self, timeout: int = 60, poll_interval: float = 2.0) -> bool: return False + def wait_tcp_proxy_ready( + self, timeout: int = 60, poll_interval: float = 2.0 + ) -> bool: + """ + Wait for TCP proxy to become ready and available. + + Polls the deployment metadata until the TCP proxy information is available. + This is useful when enable_tcp_proxy=True was set during sandbox creation, + as the proxy information may not be immediately available. 
+ + Args: + timeout: Maximum time to wait in seconds + poll_interval: Time between checks in seconds + + Returns: + bool: True if TCP proxy became ready, False if timeout + """ + start_time = time.time() + + while time.time() - start_time < timeout: + tcp_proxy_info = self.get_tcp_proxy_info() + if tcp_proxy_info is not None: + return True + + time.sleep(poll_interval) + + return False + def delete(self) -> None: """Delete the sandbox instance.""" apps_api, services_api, _, _ = get_api_client(self.api_token) @@ -272,9 +305,73 @@ def get_domain(self) -> Optional[str]: Returns: Optional[str]: The domain name or None if unavailable """ - from .utils import _get_sandbox_domain + try: + from koyeb.api.exceptions import ApiException, NotFoundException + + from .utils import get_api_client + + _, services_api, _, _ = get_api_client(self.api_token) + service_response = services_api.get_service(self.service_id) + service = service_response.service + + if service.app_id: + apps_api, _, _, _ = get_api_client(self.api_token) + app_response = apps_api.get_app(service.app_id) + app = app_response.app + if hasattr(app, "domains") and app.domains: + # Use the first public domain + return app.domains[0].name + return None + except (NotFoundException, ApiException, Exception): + return None + + def get_tcp_proxy_info(self) -> Optional[tuple[str, int]]: + """ + Get the TCP proxy host and port for the sandbox. + + Returns the TCP proxy host and port as a tuple (host, port) for direct TCP access to port 3031. + This is only available if enable_tcp_proxy=True was set when creating the sandbox. 
+ + Returns: + Optional[tuple[str, int]]: A tuple of (host, port) or None if unavailable + """ + try: + from koyeb.api.exceptions import ApiException, NotFoundException + + from .utils import get_api_client + + _, services_api, _, _ = get_api_client(self.api_token) + service_response = services_api.get_service(self.service_id) + service = service_response.service + + if not service.active_deployment_id: + return None - return _get_sandbox_domain(self.service_id, self.api_token) + # Get the active deployment + from koyeb.api.api.deployments_api import DeploymentsApi + + deployments_api = DeploymentsApi() + deployments_api.api_client = services_api.api_client + deployment_response = deployments_api.get_deployment( + service.active_deployment_id + ) + deployment = deployment_response.deployment + + if not deployment.metadata or not deployment.metadata.proxy_ports: + return None + + # Find the proxy port for port 3031 + for proxy_port in deployment.metadata.proxy_ports: + if ( + proxy_port.port == 3031 + and proxy_port.host + and proxy_port.public_port + ): + return (proxy_port.host, proxy_port.public_port) + + return None + except (NotFoundException, ApiException, Exception): + return None def _get_sandbox_url(self) -> Optional[str]: """ @@ -341,6 +438,7 @@ async def create( api_token: Optional[str] = None, timeout: int = 300, idle_timeout: Optional[IdleTimeout] = None, + enable_tcp_proxy: bool = False, ) -> AsyncSandbox: """ Create a new sandbox instance with async support. 
@@ -362,6 +460,7 @@ async def create( - 0: Disable scale-to-zero (keep always-on) - int > 0: Deep sleep only (e.g., 600 for 600s deep sleep) - dict: Explicit configuration with {"light_sleep": 300, "deep_sleep": 600} + enable_tcp_proxy: If True, enables TCP proxy for direct TCP access to port 3031 Returns: AsyncSandbox: A new AsyncSandbox instance @@ -386,6 +485,7 @@ async def create( api_token=api_token, timeout=timeout, idle_timeout=idle_timeout, + enable_tcp_proxy=enable_tcp_proxy, ), ) @@ -430,6 +530,37 @@ async def wait_ready(self, timeout: int = 60, poll_interval: float = 2.0) -> boo return False + async def wait_tcp_proxy_ready( + self, timeout: int = 60, poll_interval: float = 2.0 + ) -> bool: + """ + Wait for TCP proxy to become ready and available asynchronously. + + Polls the deployment metadata until the TCP proxy information is available. + This is useful when enable_tcp_proxy=True was set during sandbox creation, + as the proxy information may not be immediately available. 
+ + Args: + timeout: Maximum time to wait in seconds + poll_interval: Time between checks in seconds + + Returns: + bool: True if TCP proxy became ready, False if timeout + """ + start_time = time.time() + + while time.time() - start_time < timeout: + loop = asyncio.get_running_loop() + tcp_proxy_info = await loop.run_in_executor( + None, super().get_tcp_proxy_info + ) + if tcp_proxy_info is not None: + return True + + await asyncio.sleep(poll_interval) + + return False + async def delete(self) -> None: """Delete the sandbox instance asynchronously.""" loop = asyncio.get_running_loop() diff --git a/koyeb/sandbox/utils.py b/koyeb/sandbox/utils.py index af5bf31e..c2bef6e7 100644 --- a/koyeb/sandbox/utils.py +++ b/koyeb/sandbox/utils.py @@ -15,6 +15,7 @@ from koyeb.api.models.deployment_env import DeploymentEnv from koyeb.api.models.deployment_instance_type import DeploymentInstanceType from koyeb.api.models.deployment_port import DeploymentPort +from koyeb.api.models.deployment_proxy_port import DeploymentProxyPort from koyeb.api.models.deployment_route import DeploymentRoute from koyeb.api.models.deployment_scaling import DeploymentScaling from koyeb.api.models.deployment_scaling_target import DeploymentScalingTarget @@ -23,6 +24,7 @@ ) from koyeb.api.models.docker_source import DockerSource from koyeb.api.models.instance_status import InstanceStatus +from koyeb.api.models.proxy_port_protocol import ProxyPortProtocol from .executor_client import SandboxClient @@ -176,6 +178,24 @@ def create_koyeb_sandbox_ports(protocol: str = "http") -> List[DeploymentPort]: ] +def create_koyeb_sandbox_proxy_ports() -> List[DeploymentProxyPort]: + """ + Create TCP proxy port configuration for koyeb/sandbox image. 
+ + Creates proxy port for direct TCP access: + - Port 3031 exposed via TCP proxy + + Returns: + List of DeploymentProxyPort objects configured for TCP proxy access + """ + return [ + DeploymentProxyPort( + port=3031, + protocol=ProxyPortProtocol.TCP, + ), + ] + + def create_koyeb_sandbox_routes() -> List[DeploymentRoute]: """ Create route configuration for koyeb/sandbox image to make it publicly accessible. @@ -347,6 +367,7 @@ def create_deployment_definition( routes: Optional[List[DeploymentRoute]] = None, idle_timeout: Optional[IdleTimeout] = None, light_sleep_enabled: bool = True, + enable_tcp_proxy: bool = False, ) -> DeploymentDefinition: """ Create deployment definition for a sandbox service. @@ -363,6 +384,7 @@ def create_deployment_definition( routes: List of routes for public access idle_timeout: Idle timeout configuration (see IdleTimeout type) light_sleep_enabled: Whether light sleep is enabled for the instance type (default: True) + enable_tcp_proxy: If True, enables TCP proxy for direct TCP access to port 3031 Returns: DeploymentDefinition object @@ -376,6 +398,11 @@ def create_deployment_definition( protocol = _validate_port_protocol(protocol) ports = create_koyeb_sandbox_ports(protocol) + # Create TCP proxy ports if enabled + proxy_ports = None + if enable_tcp_proxy: + proxy_ports = create_koyeb_sandbox_proxy_ports() + # Always use WEB type deployment_type = DeploymentDefinitionType.WEB @@ -399,6 +426,7 @@ def create_deployment_definition( docker=docker_source, env=env_vars, ports=ports, + proxy_ports=proxy_ports, routes=routes, instance_types=[DeploymentInstanceType(type=instance_type)], scalings=scalings, @@ -418,38 +446,6 @@ def get_sandbox_status( return InstanceStatus.ERROR -def _get_sandbox_domain( - service_id: str, api_token: Optional[str] = None -) -> Optional[str]: - """ - Internal function to get the public domain of a sandbox service. - - Returns the domain name (e.g., "app-name-org.koyeb.app") without protocol or path. 
- - Args: - service_id: The service ID - api_token: Optional API token (if None, will try to get from KOYEB_API_TOKEN env var) - - Returns: - Optional[str]: The domain name or None if unavailable - """ - try: - _, services_api, _, _ = get_api_client(api_token) - service_response = services_api.get_service(service_id) - service = service_response.service - - if service.app_id: - apps_api, _, _, _ = get_api_client(api_token) - app_response = apps_api.get_app(service.app_id) - app = app_response.app - if hasattr(app, "domains") and app.domains: - # Use the first public domain - return app.domains[0].name - return None - except (NotFoundException, ApiException, Exception): - return None - - def is_sandbox_healthy( instance_id: str, sandbox_url: str, From d241c43e5e811bfbef7c573c910495ac23254427 Mon Sep 17 00:00:00 2001 From: Edouard Bonlieu Date: Tue, 4 Nov 2025 21:32:35 +0100 Subject: [PATCH 29/47] add port binding support to sandbox --- koyeb/sandbox/__init__.py | 3 +- koyeb/sandbox/executor_client.py | 49 ++++++++++++ koyeb/sandbox/sandbox.py | 129 +++++++++++++++++++++++++++++++ 3 files changed, 180 insertions(+), 1 deletion(-) diff --git a/koyeb/sandbox/__init__.py b/koyeb/sandbox/__init__.py index eb9d99df..db75d7af 100644 --- a/koyeb/sandbox/__init__.py +++ b/koyeb/sandbox/__init__.py @@ -16,7 +16,7 @@ SandboxExecutor, ) from .filesystem import FileInfo, SandboxFilesystem -from .sandbox import Sandbox, AsyncSandbox +from .sandbox import AsyncSandbox, ExposedPort, Sandbox from .utils import SandboxError __all__ = [ @@ -31,4 +31,5 @@ "CommandResult", "CommandStatus", "SandboxCommandError", + "ExposedPort", ] diff --git a/koyeb/sandbox/executor_client.py b/koyeb/sandbox/executor_client.py index 0c4f8a03..073db246 100644 --- a/koyeb/sandbox/executor_client.py +++ b/koyeb/sandbox/executor_client.py @@ -276,3 +276,52 @@ def list_dir(self, path: str) -> Dict[str, Any]: "POST", f"{self.base_url}/list_dir", json=payload, headers=self.headers ) return response.json() + + 
def bind_port(self, port: int) -> Dict[str, Any]: + """ + Bind a port to the TCP proxy for external access. + + Configures the TCP proxy to forward traffic to the specified port inside the sandbox. + This allows you to expose services running inside the sandbox to external connections. + + Args: + port: The port number to bind to (must be a valid port number) + + Returns: + Dict with success status, message, and port information + + Notes: + - Only one port can be bound at a time + - Binding a new port will override the previous binding + - The port must be available and accessible within the sandbox environment + """ + payload = {"port": str(port)} + response = self._request_with_retry( + "POST", f"{self.base_url}/bind_port", json=payload, headers=self.headers + ) + return response.json() + + def unbind_port(self, port: Optional[int] = None) -> Dict[str, Any]: + """ + Unbind a port from the TCP proxy. + + Removes the TCP proxy port binding, stopping traffic forwarding to the previously bound port. + + Args: + port: Optional port number to unbind. If provided, it must match the currently bound port. + If not provided, any existing binding will be removed. 
+ + Returns: + Dict with success status and message + + Notes: + - If a port is specified and doesn't match the currently bound port, the request will fail + - After unbinding, the TCP proxy will no longer forward traffic + """ + payload = {} + if port is not None: + payload["port"] = str(port) + response = self._request_with_retry( + "POST", f"{self.base_url}/unbind_port", json=payload, headers=self.headers + ) + return response.json() diff --git a/koyeb/sandbox/sandbox.py b/koyeb/sandbox/sandbox.py index 59e0ad9e..38820fcb 100644 --- a/koyeb/sandbox/sandbox.py +++ b/koyeb/sandbox/sandbox.py @@ -10,12 +10,14 @@ import os import secrets import time +from dataclasses import dataclass from typing import TYPE_CHECKING, Dict, List, Optional from koyeb.api.models.create_app import CreateApp from .utils import ( IdleTimeout, + SandboxError, build_env_vars, create_deployment_definition, create_docker_source, @@ -29,6 +31,17 @@ from .filesystem import AsyncSandboxFilesystem, SandboxFilesystem +@dataclass +class ExposedPort: + """Result of exposing a port via TCP proxy.""" + + port: int + exposed_at: str + + def __str__(self) -> str: + return f"ExposedPort(port={self.port}, exposed_at='{self.exposed_at}')" + + class Sandbox: """ Synchronous sandbox for running code on Koyeb infrastructure. @@ -418,6 +431,108 @@ def exec(self) -> SandboxExecutor: return SandboxExecutor(self) + def expose_port(self, port: int) -> ExposedPort: + """ + Expose a port to external connections via TCP proxy. + + Binds the specified internal port to the TCP proxy, allowing external + connections to reach services running on that port inside the sandbox. + Automatically unbinds any existing port before binding the new one. 
+ + Args: + port: The internal port number to expose (must be a valid port number) + + Returns: + ExposedPort: An object with `port` and `exposed_at` attributes: + - port: The exposed port number + - exposed_at: The full URL with https:// protocol (e.g., "https://app-name-org.koyeb.app") + + Raises: + SandboxError: If the port binding operation fails + + Notes: + - Only one port can be exposed at a time + - Any existing port binding is automatically unbound before binding the new port + - The port must be available and accessible within the sandbox environment + - The TCP proxy is accessed via get_tcp_proxy_info() which returns (host, port) + + Example: + >>> result = sandbox.expose_port(8080) + >>> result.port + 8080 + >>> result.exposed_at + 'https://app-name-org.koyeb.app' + """ + from .executor_client import SandboxClient + + sandbox_url = self._get_sandbox_url() + if not sandbox_url: + raise SandboxError("Unable to get sandbox URL") + if not self.sandbox_secret: + raise SandboxError("Sandbox secret not available") + + client = SandboxClient(sandbox_url, self.sandbox_secret) + try: + # Always unbind any existing port first + try: + client.unbind_port() + except Exception: + # Ignore errors when unbinding - it's okay if no port was bound + pass + + # Now bind the new port + response = client.bind_port(port) + if not response.get("success", False): + error_msg = response.get("error", "Unknown error") + raise SandboxError(f"Failed to expose port {port}: {error_msg}") + + # Get domain for exposed_at + domain = self.get_domain() + if not domain: + raise SandboxError("Domain not available for exposed port") + + # Return the port from response if available, otherwise use the requested port + exposed_port = int(response.get("port", port)) + exposed_at = f"https://{domain}" + return ExposedPort(port=exposed_port, exposed_at=exposed_at) + except Exception as e: + if isinstance(e, SandboxError): + raise + raise SandboxError(f"Failed to expose port {port}: {str(e)}") from 
e + + def unexpose_port(self) -> None: + """ + Unexpose a port from external connections. + + Removes the TCP proxy port binding, stopping traffic forwarding to the + previously bound port. + + Raises: + SandboxError: If the port unbinding operation fails + + Notes: + - After unexposing, the TCP proxy will no longer forward traffic + - Safe to call even if no port is currently bound + """ + from .executor_client import SandboxClient + + sandbox_url = self._get_sandbox_url() + if not sandbox_url: + raise SandboxError("Unable to get sandbox URL") + if not self.sandbox_secret: + raise SandboxError("Sandbox secret not available") + + client = SandboxClient(sandbox_url, self.sandbox_secret) + try: + response = client.unbind_port() + if not response.get("success", False): + error_msg = response.get("error", "Unknown error") + raise SandboxError(f"Failed to unexpose port: {error_msg}") + except Exception as e: + if isinstance(e, SandboxError): + raise + raise SandboxError(f"Failed to unexpose port: {str(e)}") from e + class AsyncSandbox(Sandbox): """ @@ -590,3 +705,17 @@ def filesystem(self) -> AsyncSandboxFilesystem: from .filesystem import AsyncSandboxFilesystem return AsyncSandboxFilesystem(self) + + async def expose_port(self, port: int) -> ExposedPort: + """Expose a port to external connections via TCP proxy asynchronously.""" + import asyncio + + loop = asyncio.get_running_loop() + return await loop.run_in_executor(None, super().expose_port, port) + + async def unexpose_port(self) -> None: + """Unexpose a port from external connections asynchronously.""" + import asyncio + + loop = asyncio.get_running_loop() + await loop.run_in_executor(None, super().unexpose_port) From 421ef4acdfa9f1f051bb157656e8ae2e544d4811 Mon Sep 17 00:00:00 2001 From: Edouard Bonlieu Date: Tue, 4 Nov 2025 21:40:39 +0100 Subject: [PATCH 30/47] add support for long-running processes --- koyeb/sandbox/executor_client.py | 92 +++++++++++++++ koyeb/sandbox/sandbox.py | 190 
++++++++++++++++++++++++++++++- 2 files changed, 281 insertions(+), 1 deletion(-) diff --git a/koyeb/sandbox/executor_client.py b/koyeb/sandbox/executor_client.py index 073db246..85c2b149 100644 --- a/koyeb/sandbox/executor_client.py +++ b/koyeb/sandbox/executor_client.py @@ -325,3 +325,95 @@ def unbind_port(self, port: Optional[int] = None) -> Dict[str, Any]: "POST", f"{self.base_url}/unbind_port", json=payload, headers=self.headers ) return response.json() + + def start_process( + self, cmd: str, cwd: Optional[str] = None, env: Optional[Dict[str, str]] = None + ) -> Dict[str, Any]: + """ + Start a background process in the sandbox. + + Starts a long-running background process that continues executing even after + the API call completes. Use this for servers, workers, or other long-running tasks. + + Args: + cmd: The shell command to execute as a background process + cwd: Optional working directory for the process + env: Optional environment variables to set/override for the process + + Returns: + Dict with process id and success status: + - id: The unique process ID (UUID string) + - success: True if the process was started successfully + + Example: + >>> client = SandboxClient("http://localhost:8080", "secret") + >>> result = client.start_process("python -u server.py") + >>> process_id = result["id"] + >>> print(f"Started process: {process_id}") + """ + payload = {"cmd": cmd} + if cwd is not None: + payload["cwd"] = cwd + if env is not None: + payload["env"] = env + + response = self._request_with_retry( + "POST", f"{self.base_url}/start_process", json=payload, headers=self.headers + ) + return response.json() + + def kill_process(self, process_id: str) -> Dict[str, Any]: + """ + Kill a background process by its ID. + + Terminates a running background process. This sends a SIGTERM signal to the process, + allowing it to clean up gracefully. If the process doesn't terminate within a timeout, + it will be forcefully killed with SIGKILL. 
+ + Args: + process_id: The unique process ID (UUID string) to kill + + Returns: + Dict with success status and error message if any + + Example: + >>> client = SandboxClient("http://localhost:8080", "secret") + >>> result = client.kill_process("550e8400-e29b-41d4-a716-446655440000") + >>> if result.get("success"): + ... print("Process killed successfully") + """ + payload = {"id": process_id} + response = self._request_with_retry( + "POST", f"{self.base_url}/kill_process", json=payload, headers=self.headers + ) + return response.json() + + def list_processes(self) -> Dict[str, Any]: + """ + List all background processes. + + Returns information about all currently running and recently completed background + processes. This includes both active processes and processes that have completed + (which remain in memory until server restart). + + Returns: + Dict with a list of processes: + - processes: List of process objects, each containing: + - id: Process ID (UUID string) + - cmd: The command that was executed + - status: Process status (e.g., "running", "completed") + - pid: OS process ID (if running) + - exit_code: Exit code (if completed) + - started_at: ISO 8601 timestamp when process started + - completed_at: ISO 8601 timestamp when process completed (if applicable) + + Example: + >>> client = SandboxClient("http://localhost:8080", "secret") + >>> result = client.list_processes() + >>> for process in result.get("processes", []): + ... 
print(f"{process['id']}: {process['cmd']} - {process['status']}") + """ + response = self._request_with_retry( + "GET", f"{self.base_url}/list_processes", headers=self.headers + ) + return response.json() diff --git a/koyeb/sandbox/sandbox.py b/koyeb/sandbox/sandbox.py index 38820fcb..8b5ed5e0 100644 --- a/koyeb/sandbox/sandbox.py +++ b/koyeb/sandbox/sandbox.py @@ -11,7 +11,7 @@ import secrets import time from dataclasses import dataclass -from typing import TYPE_CHECKING, Dict, List, Optional +from typing import TYPE_CHECKING, Any, Dict, List, Optional from koyeb.api.models.create_app import CreateApp @@ -533,6 +533,164 @@ def unexpose_port(self) -> None: raise raise SandboxError(f"Failed to unexpose port: {str(e)}") from e + def launch_process( + self, cmd: str, cwd: Optional[str] = None, env: Optional[Dict[str, str]] = None + ) -> str: + """ + Launch a background process in the sandbox. + + Starts a long-running background process that continues executing even after + the method returns. Use this for servers, workers, or other long-running tasks. 
+ + Args: + cmd: The shell command to execute as a background process + cwd: Optional working directory for the process + env: Optional environment variables to set/override for the process + + Returns: + str: The unique process ID (UUID string) that can be used to manage the process + + Raises: + SandboxError: If the process launch fails + + Example: + >>> process_id = sandbox.launch_process("python -u server.py") + >>> print(f"Started process: {process_id}") + """ + from .executor_client import SandboxClient + + sandbox_url = self._get_sandbox_url() + if not sandbox_url: + raise SandboxError("Unable to get sandbox URL") + if not self.sandbox_secret: + raise SandboxError("Sandbox secret not available") + + client = SandboxClient(sandbox_url, self.sandbox_secret) + try: + response = client.start_process(cmd, cwd, env) + if not response.get("success", False): + error_msg = response.get("error", "Unknown error") + raise SandboxError(f"Failed to launch process: {error_msg}") + process_id = response.get("id") + if not process_id: + raise SandboxError("Process launched but no process ID returned") + return process_id + except Exception as e: + if isinstance(e, SandboxError): + raise + raise SandboxError(f"Failed to launch process: {str(e)}") from e + + def kill_process(self, process_id: str) -> None: + """ + Kill a background process by its ID. + + Terminates a running background process. This sends a SIGTERM signal to the process, + allowing it to clean up gracefully. If the process doesn't terminate within a timeout, + it will be forcefully killed with SIGKILL. 
+ + Args: + process_id: The unique process ID (UUID string) to kill + + Raises: + SandboxError: If the process kill operation fails + + Example: + >>> sandbox.kill_process("550e8400-e29b-41d4-a716-446655440000") + """ + from .executor_client import SandboxClient + + sandbox_url = self._get_sandbox_url() + if not sandbox_url: + raise SandboxError("Unable to get sandbox URL") + if not self.sandbox_secret: + raise SandboxError("Sandbox secret not available") + + client = SandboxClient(sandbox_url, self.sandbox_secret) + try: + response = client.kill_process(process_id) + if not response.get("success", False): + error_msg = response.get("error", "Unknown error") + raise SandboxError(f"Failed to kill process {process_id}: {error_msg}") + except Exception as e: + if isinstance(e, SandboxError): + raise + raise SandboxError(f"Failed to kill process {process_id}: {str(e)}") from e + + def list_processes(self) -> List[Dict[str, Any]]: + """ + List all background processes. + + Returns information about all currently running and recently completed background + processes. This includes both active processes and processes that have completed + (which remain in memory until server restart). + + Returns: + List[Dict[str, Any]]: List of process dictionaries, each containing: + - id: Process ID (UUID string) + - cmd: The command that was executed + - status: Process status (e.g., "running", "completed") + - pid: OS process ID (if running) + - exit_code: Exit code (if completed) + - started_at: ISO 8601 timestamp when process started + - completed_at: ISO 8601 timestamp when process completed (if applicable) + + Raises: + SandboxError: If listing processes fails + + Example: + >>> processes = sandbox.list_processes() + >>> for process in processes: + ... 
print(f"{process['id']}: {process['cmd']} - {process['status']}") + """ + from .executor_client import SandboxClient + + sandbox_url = self._get_sandbox_url() + if not sandbox_url: + raise SandboxError("Unable to get sandbox URL") + if not self.sandbox_secret: + raise SandboxError("Sandbox secret not available") + + client = SandboxClient(sandbox_url, self.sandbox_secret) + try: + response = client.list_processes() + return response.get("processes", []) + except Exception as e: + if isinstance(e, SandboxError): + raise + raise SandboxError(f"Failed to list processes: {str(e)}") from e + + def kill_all_processes(self) -> int: + """ + Kill all running background processes. + + Convenience method that lists all processes and kills them all. This is useful + for cleanup operations. + + Returns: + int: The number of processes that were killed + + Raises: + SandboxError: If listing or killing processes fails + + Example: + >>> count = sandbox.kill_all_processes() + >>> print(f"Killed {count} processes") + """ + processes = self.list_processes() + killed_count = 0 + for process in processes: + process_id = process.get("id") + status = process.get("status", "") + # Only kill running processes + if process_id and status == "running": + try: + self.kill_process(process_id) + killed_count += 1 + except SandboxError: + # Continue killing other processes even if one fails + pass + return killed_count + class AsyncSandbox(Sandbox): """ @@ -719,3 +877,33 @@ async def unexpose_port(self) -> None: loop = asyncio.get_running_loop() await loop.run_in_executor(None, super().unexpose_port) + + async def launch_process( + self, cmd: str, cwd: Optional[str] = None, env: Optional[Dict[str, str]] = None + ) -> str: + """Launch a background process in the sandbox asynchronously.""" + import asyncio + + loop = asyncio.get_running_loop() + return await loop.run_in_executor(None, super().launch_process, cmd, cwd, env) + + async def kill_process(self, process_id: str) -> None: + """Kill a 
background process by its ID asynchronously.""" + import asyncio + + loop = asyncio.get_running_loop() + await loop.run_in_executor(None, super().kill_process, process_id) + + async def list_processes(self) -> List[Dict[str, Any]]: + """List all background processes asynchronously.""" + import asyncio + + loop = asyncio.get_running_loop() + return await loop.run_in_executor(None, super().list_processes) + + async def kill_all_processes(self) -> int: + """Kill all running background processes asynchronously.""" + import asyncio + + loop = asyncio.get_running_loop() + return await loop.run_in_executor(None, super().kill_all_processes) From a9a7c8597cf475880f229f461239d0783a5bc456 Mon Sep 17 00:00:00 2001 From: Edouard Bonlieu Date: Tue, 4 Nov 2025 21:55:48 +0100 Subject: [PATCH 31/47] type annotation fixes --- koyeb/sandbox/filesystem.py | 2 +- koyeb/sandbox/sandbox.py | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/koyeb/sandbox/filesystem.py b/koyeb/sandbox/filesystem.py index 6a156736..d5320ac6 100644 --- a/koyeb/sandbox/filesystem.py +++ b/koyeb/sandbox/filesystem.py @@ -64,7 +64,7 @@ def _get_client(self) -> SandboxClient: self._client = SandboxClient(sandbox_url, self.sandbox.sandbox_secret) return self._client - def _get_executor(self) -> SandboxExecutor: + def _get_executor(self) -> "SandboxExecutor": """Get or create SandboxExecutor instance""" if self._executor is None: from .exec import SandboxExecutor diff --git a/koyeb/sandbox/sandbox.py b/koyeb/sandbox/sandbox.py index 8b5ed5e0..247c1606 100644 --- a/koyeb/sandbox/sandbox.py +++ b/koyeb/sandbox/sandbox.py @@ -418,14 +418,14 @@ def is_healthy(self) -> bool: ) @property - def filesystem(self) -> SandboxFilesystem: + def filesystem(self) -> "SandboxFilesystem": """Get filesystem operations interface""" from .filesystem import SandboxFilesystem return SandboxFilesystem(self) @property - def exec(self) -> SandboxExecutor: + def exec(self) -> "SandboxExecutor": """Get command 
execution interface""" from .exec import SandboxExecutor @@ -851,14 +851,14 @@ async def is_healthy(self) -> bool: return await loop.run_in_executor(None, super().is_healthy) @property - def exec(self) -> AsyncSandboxExecutor: + def exec(self) -> "AsyncSandboxExecutor": """Get async command execution interface""" from .exec import AsyncSandboxExecutor return AsyncSandboxExecutor(self) @property - def filesystem(self) -> AsyncSandboxFilesystem: + def filesystem(self) -> "AsyncSandboxFilesystem": """Get filesystem operations interface""" from .filesystem import AsyncSandboxFilesystem From 54a263176a7787ed33707cfbc68c5ba086b73764 Mon Sep 17 00:00:00 2001 From: Edouard Bonlieu Date: Tue, 4 Nov 2025 22:18:11 +0100 Subject: [PATCH 32/47] add new examples --- examples/00_run_all.py | 124 +++++++++-------- .../02_create_sandbox_with_timing_async.py | 80 ++++++----- examples/13_background_processes.py | 97 +++++++++++++ examples/13_background_processes_async.py | 97 +++++++++++++ examples/14_expose_port.py | 121 ++++++++++++++++ examples/14_expose_port_async.py | 129 ++++++++++++++++++ koyeb/sandbox/sandbox.py | 12 +- 7 files changed, 560 insertions(+), 100 deletions(-) create mode 100755 examples/13_background_processes.py create mode 100755 examples/13_background_processes_async.py create mode 100755 examples/14_expose_port.py create mode 100755 examples/14_expose_port_async.py diff --git a/examples/00_run_all.py b/examples/00_run_all.py index b96d4d6a..01ec7598 100644 --- a/examples/00_run_all.py +++ b/examples/00_run_all.py @@ -1,7 +1,6 @@ #!/usr/bin/env python3 """Run all synchronous example scripts in order""" -import os import subprocess import sys import time @@ -11,60 +10,65 @@ def main(): # Get the examples directory examples_dir = Path(__file__).parent - + # Find all Python files, excluding this script and async variants - example_files = sorted([ - f for f in examples_dir.glob("*.py") - if f.name not in ["00_run_all.py", "00_run_all.py"] - and not 
f.name.endswith("_async.py") - ]) - + example_files = sorted( + [ + f + for f in examples_dir.glob("*.py") + if f.name not in ["00_run_all.py", "00_run_all.py"] + and not f.name.endswith("_async.py") + ] + ) + if not example_files: print("No example files found to run") return 0 - + print(f"Found {len(example_files)} example(s) to run\n") print("=" * 70) - + total_start = time.time() results = [] - + for example_file in example_files: example_name = example_file.name print(f"\n▶ Running: {example_name}") print("-" * 70) - + start_time = time.time() - + try: # Run the example script result = subprocess.run( [sys.executable, str(example_file)], capture_output=True, text=True, - timeout=60 # 60 second timeout per script + timeout=60, # 60 second timeout per script ) - + elapsed_time = time.time() - start_time - + # Print output if result.stdout: print(result.stdout) - + # Check for errors if result.returncode != 0: print(f"\n❌ ERROR in {example_name}") if result.stderr: print("STDERR:") print(result.stderr) - - results.append({ - "name": example_name, - "status": "FAILED", - "time": elapsed_time, - "error": result.stderr or "Non-zero exit code" - }) - + + results.append( + { + "name": example_name, + "status": "FAILED", + "time": elapsed_time, + "error": result.stderr or "Non-zero exit code", + } + ) + # Break on error print("\n" + "=" * 70) print("STOPPING: Error encountered") @@ -72,57 +76,59 @@ def main(): print_summary(results, time.time() - total_start) return 1 else: - results.append({ - "name": example_name, - "status": "PASSED", - "time": elapsed_time - }) + results.append( + {"name": example_name, "status": "PASSED", "time": elapsed_time} + ) print(f"✓ Completed in {elapsed_time:.2f}s") - + except subprocess.TimeoutExpired: elapsed_time = time.time() - start_time print(f"\n❌ TIMEOUT in {example_name} after {elapsed_time:.2f}s") - - results.append({ - "name": example_name, - "status": "TIMEOUT", - "time": elapsed_time, - "error": "Script exceeded 60 second 
timeout" - }) - + + results.append( + { + "name": example_name, + "status": "TIMEOUT", + "time": elapsed_time, + "error": "Script exceeded 60 second timeout", + } + ) + # Break on timeout print("\n" + "=" * 70) print("STOPPING: Timeout encountered") print("=" * 70) print_summary(results, time.time() - total_start) return 1 - + except Exception as e: elapsed_time = time.time() - start_time print(f"\n❌ EXCEPTION in {example_name}: {e}") - - results.append({ - "name": example_name, - "status": "ERROR", - "time": elapsed_time, - "error": str(e) - }) - + + results.append( + { + "name": example_name, + "status": "ERROR", + "time": elapsed_time, + "error": str(e), + } + ) + # Break on exception print("\n" + "=" * 70) print("STOPPING: Exception encountered") print("=" * 70) print_summary(results, time.time() - total_start) return 1 - + total_time = time.time() - total_start - + # Print summary print("\n" + "=" * 70) print("ALL EXAMPLES COMPLETED SUCCESSFULLY") print("=" * 70) print_summary(results, total_time) - + return 0 @@ -130,24 +136,26 @@ def print_summary(results, total_time): """Print execution summary""" print("\n📊 EXECUTION SUMMARY") print("-" * 70) - + for result in results: status_symbol = { "PASSED": "✓", "FAILED": "❌", "TIMEOUT": "⏱", - "ERROR": "❌" + "ERROR": "❌", }.get(result["status"], "?") - - print(f"{status_symbol} {result['name']:40s} {result['time']:>6.2f}s {result['status']}") - + + print( + f"{status_symbol} {result['name']:40s} {result['time']:>6.2f}s {result['status']}" + ) + if "error" in result: error_preview = result["error"].split("\n")[0][:50] print(f" Error: {error_preview}") - + print("-" * 70) print(f"Total execution time: {total_time:.2f}s") - + passed = sum(1 for r in results if r["status"] == "PASSED") total = len(results) print(f"Results: {passed}/{total} passed") diff --git a/examples/02_create_sandbox_with_timing_async.py b/examples/02_create_sandbox_with_timing_async.py index fd16df1a..c34f1b47 100644 --- 
a/examples/02_create_sandbox_with_timing_async.py +++ b/examples/02_create_sandbox_with_timing_async.py @@ -8,68 +8,71 @@ from collections import defaultdict from datetime import datetime - from koyeb import AsyncSandbox class TimingTracker: """Track timing information for operations""" + def __init__(self): self.operations = [] self.categories = defaultdict(list) - + def record(self, name, duration, category="general"): """Record an operation's timing""" - self.operations.append({ - 'name': name, - 'duration': duration, - 'category': category, - 'timestamp': datetime.now() - }) + self.operations.append( + { + "name": name, + "duration": duration, + "category": category, + "timestamp": datetime.now(), + } + ) self.categories[category].append(duration) - + def get_total_time(self): """Get total time for all operations""" - return sum(op['duration'] for op in self.operations) - + return sum(op["duration"] for op in self.operations) + def get_category_total(self, category): """Get total time for a specific category""" return sum(self.categories[category]) - + def print_recap(self): """Print a detailed recap of all timings""" - print("\n" + "="*70) + print("\n" + "=" * 70) print(" TIMING SUMMARY") - print("="*70) - + print("=" * 70) + if not self.operations: print("No operations recorded") return - + total_time = self.get_total_time() - + # Print individual operations print() - + for op in self.operations: - percentage = (op['duration'] / total_time * 100) if total_time > 0 else 0 + percentage = (op["duration"] / total_time * 100) if total_time > 0 else 0 bar_length = int(percentage / 2) # 50 chars = 100% bar = "█" * bar_length - - print(f" {op['name']:<30} {op['duration']:6.2f}s {percentage:5.1f}% {bar}") - + + print( + f" {op['name']:<30} {op['duration']:6.2f}s {percentage:5.1f}% {bar}" + ) + print() print("-" * 70) print(f" {'TOTAL':<30} {total_time:6.2f}s 100.0%") - print("="*70) + print("=" * 70) async def main(run_long_tests=False): - script_start = time.time() 
tracker = TimingTracker() - + print("Starting sandbox operations...") - + api_token = os.getenv("KOYEB_API_TOKEN") if not api_token: print("Error: KOYEB_API_TOKEN not set") @@ -93,8 +96,8 @@ async def main(run_long_tests=False): # Check status with timing print(" → Checking sandbox status...") status_start = time.time() - status = await sandbox.status() - is_healthy = await sandbox.is_healthy() + await sandbox.status() + await sandbox.is_healthy() status_duration = time.time() - status_start tracker.record("Status check", status_duration, "monitoring") print(f" ✓ took {status_duration:.1f}s") @@ -102,7 +105,7 @@ async def main(run_long_tests=False): # Test command execution with timing print(" → Executing initial test command...") exec_start = time.time() - result = await sandbox.exec("echo 'Sandbox is ready!'") + await sandbox.exec("echo 'Sandbox is ready!'") exec_duration = time.time() - exec_start tracker.record("Initial exec command", exec_duration, "execution") print(f" ✓ took {exec_duration:.1f}s") @@ -111,7 +114,7 @@ async def main(run_long_tests=False): # Long test 1: Install a package print(" → [LONG TEST] Installing a package...") install_start = time.time() - result = await sandbox.exec("pip install requests") + await sandbox.exec("pip install requests") install_duration = time.time() - install_start tracker.record("Package installation", install_duration, "long_tests") print(f" ✓ took {install_duration:.1f}s") @@ -119,7 +122,9 @@ async def main(run_long_tests=False): # Long test 2: Run a computation print(" → [LONG TEST] Running computation...") compute_start = time.time() - result = await sandbox.exec("python -c 'import time; sum(range(10000000)); time.sleep(2)'") + await sandbox.exec( + "python -c 'import time; sum(range(10000000)); time.sleep(2)'" + ) compute_duration = time.time() - compute_start tracker.record("Heavy computation", compute_duration, "long_tests") print(f" ✓ took {compute_duration:.1f}s") @@ -131,12 +136,15 @@ async def 
main(run_long_tests=False): await sandbox.status() await asyncio.sleep(0.5) multi_check_duration = time.time() - multi_check_start - tracker.record("Multiple status checks (5x)", multi_check_duration, "long_tests") + tracker.record( + "Multiple status checks (5x)", multi_check_duration, "long_tests" + ) print(f" ✓ took {multi_check_duration:.1f}s") except Exception as e: print(f"\n✗ Error occurred: {e}") import traceback + traceback.print_exc() finally: if sandbox: @@ -146,9 +154,9 @@ async def main(run_long_tests=False): delete_duration = time.time() - delete_start tracker.record("Sandbox deletion", delete_duration, "cleanup") print(f" ✓ took {delete_duration:.1f}s") - + print("\n✓ All operations completed") - + # Print detailed recap tracker.print_recap() @@ -160,8 +168,8 @@ async def main(run_long_tests=False): parser.add_argument( "--long", action="store_true", - help="Run longer tests (package installation, computation, etc.)" + help="Run longer tests (package installation, computation, etc.)", ) - + args = parser.parse_args() asyncio.run(main(run_long_tests=args.long)) diff --git a/examples/13_background_processes.py b/examples/13_background_processes.py new file mode 100755 index 00000000..37627298 --- /dev/null +++ b/examples/13_background_processes.py @@ -0,0 +1,97 @@ +#!/usr/bin/env python3 +"""Background process management""" + +import os +import time + +from koyeb import Sandbox + + +def main(): + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + print("Error: KOYEB_API_TOKEN not set") + return + + sandbox = None + try: + sandbox = Sandbox.create( + image="koyeb/sandbox", + name="background-processes", + wait_ready=True, + api_token=api_token, + ) + + print("Launching background processes...") + + # Launch a long-running process + process_id_1 = sandbox.launch_process( + "python3 -c 'import time; [print(f\"Process 1: {i}\") or time.sleep(1) for i in range(10)]'" + ) + print(f"Launched process 1: {process_id_1}") + + # Launch another process 
with a different command + process_id_2 = sandbox.launch_process( + "python3 -c 'import time; [print(f\"Process 2: {i}\") or time.sleep(1) for i in range(5)]'" + ) + print(f"Launched process 2: {process_id_2}") + + # Wait a bit for processes to start + time.sleep(2) + + # List all processes + print("\nListing all processes:") + processes = sandbox.list_processes() + for process in processes: + print(f" ID: {process.get('id', 'N/A')}") + print(f" Status: {process.get('status', 'N/A')}") + if process.get("pid"): + print(f" PID: {process.get('pid')}") + print() + + # Kill a specific process + print(f"Killing process {process_id_2}...") + sandbox.kill_process(process_id_2) + print("Process killed") + + # Wait a bit + time.sleep(1) + + # List processes again + print("\nListing processes after kill:") + processes = sandbox.list_processes() + for process in processes: + print(f" ID: {process.get('id', 'N/A')}") + print(f" Status: {process.get('status', 'N/A')}") + print() + + # Launch a few more processes + process_id_3 = sandbox.launch_process("sleep 5") + process_id_4 = sandbox.launch_process("sleep 5") + print(f"Launched processes 3 and 4: {process_id_3}, {process_id_4}") + + # Wait a bit + time.sleep(1) + + # Kill all running processes + print("\nKilling all running processes...") + killed_count = sandbox.kill_all_processes() + print(f"Killed {killed_count} processes") + + # Final list + print("\nFinal process list:") + processes = sandbox.list_processes() + for process in processes: + print(f" ID: {process.get('id', 'N/A')}") + print(f" Status: {process.get('status', 'N/A')}") + print() + + except Exception as e: + print(f"Error: {e}") + finally: + if sandbox: + sandbox.delete() + + +if __name__ == "__main__": + main() diff --git a/examples/13_background_processes_async.py b/examples/13_background_processes_async.py new file mode 100755 index 00000000..82043d34 --- /dev/null +++ b/examples/13_background_processes_async.py @@ -0,0 +1,97 @@ +#!/usr/bin/env python3 
+"""Background process management (async variant)""" + +import asyncio +import os + +from koyeb import AsyncSandbox + + +async def main(): + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + print("Error: KOYEB_API_TOKEN not set") + return + + sandbox = None + try: + sandbox = await AsyncSandbox.create( + image="koyeb/sandbox", + name="background-processes", + wait_ready=True, + api_token=api_token, + ) + + print("Launching background processes...") + + # Launch a long-running process + process_id_1 = await sandbox.launch_process( + "python3 -c 'import time; [print(f\"Process 1: {i}\") or time.sleep(1) for i in range(10)]'" + ) + print(f"Launched process 1: {process_id_1}") + + # Launch another process with a different command + process_id_2 = await sandbox.launch_process( + "python3 -c 'import time; [print(f\"Process 2: {i}\") or time.sleep(1) for i in range(5)]'" + ) + print(f"Launched process 2: {process_id_2}") + + # Wait a bit for processes to start + await asyncio.sleep(2) + + # List all processes + print("\nListing all processes:") + processes = await sandbox.list_processes() + for process in processes: + print(f" ID: {process.get('id', 'N/A')}") + print(f" Status: {process.get('status', 'N/A')}") + if process.get("pid"): + print(f" PID: {process.get('pid')}") + print() + + # Kill a specific process + print(f"Killing process {process_id_2}...") + await sandbox.kill_process(process_id_2) + print("Process killed") + + # Wait a bit + await asyncio.sleep(1) + + # List processes again + print("\nListing processes after kill:") + processes = await sandbox.list_processes() + for process in processes: + print(f" ID: {process.get('id', 'N/A')}") + print(f" Status: {process.get('status', 'N/A')}") + print() + + # Launch a few more processes + process_id_3 = await sandbox.launch_process("sleep 5") + process_id_4 = await sandbox.launch_process("sleep 5") + print(f"Launched processes 3 and 4: {process_id_3}, {process_id_4}") + + # Wait a bit + await 
asyncio.sleep(1) + + # Kill all running processes + print("\nKilling all running processes...") + killed_count = await sandbox.kill_all_processes() + print(f"Killed {killed_count} processes") + + # Final list + print("\nFinal process list:") + processes = await sandbox.list_processes() + for process in processes: + print(f" ID: {process.get('id', 'N/A')}") + print(f" Status: {process.get('status', 'N/A')}") + print() + + except Exception as e: + print(f"Error: {e}") + finally: + if sandbox: + await sandbox.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/14_expose_port.py b/examples/14_expose_port.py new file mode 100755 index 00000000..330110e2 --- /dev/null +++ b/examples/14_expose_port.py @@ -0,0 +1,121 @@ +#!/usr/bin/env python3 +"""Port exposure via TCP proxy""" + +import os +import time + +import requests + +from koyeb import Sandbox + + +def main(): + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + print("Error: KOYEB_API_TOKEN not set") + return + + sandbox = None + try: + sandbox = Sandbox.create( + image="koyeb/sandbox", + name="expose-port", + wait_ready=True, + api_token=api_token, + ) + + # Create a test file to serve + print("\nCreating test file...") + sandbox.filesystem.write_file( + "/tmp/test.html", "

Hello from Sandbox!

Port 8080

" + ) + print("Test file created") + + # Start a simple HTTP server on port 8080 + print("\nStarting HTTP server on port 8080...") + process_id = sandbox.launch_process( + "python3 -m http.server 8080", + cwd="/tmp", + ) + print(f"Server started with process ID: {process_id}") + + # Wait for server to start + print("Waiting for server to start...") + time.sleep(3) + + # Expose port 8080 + print("\nExposing port 8080...") + exposed = sandbox.expose_port(8080) + print(f"Port exposed: {exposed.port}") + print(f"Exposed at: {exposed.exposed_at}") + + # Wait a bit for the port to be ready + print("Waiting for port to be ready...") + time.sleep(2) + + # Make a request to verify it's working + print("\nMaking HTTP request to verify port exposure...") + try: + response = requests.get(f"{exposed.exposed_at}/test.html", timeout=10) + response.raise_for_status() + print(f"✓ Request successful! Status: {response.status_code}") + print(f"✓ Response content: {response.text.strip()}") + except requests.RequestException as e: + print(f"⚠ Request failed: {e}") + print("Note: Port may still be propagating. Try again in a few seconds.") + + # List processes to show the server is running + print("\nRunning processes:") + processes = sandbox.list_processes() + for process in processes: + if process.get("status") == "running": + print(f" {process.get('id')}: {process.get('status')}") + + # Switch to a different port (e.g., 8081) + print("\nSwitching to port 8081...") + # Create a different test file for port 8081 + sandbox.filesystem.write_file( + "/tmp/test2.html", "

Hello from Sandbox!

Port 8081

" + ) + # Start a new server on 8081 + sandbox.launch_process( + "python3 -m http.server 8081", + cwd="/tmp", + ) + print("Waiting for server to start...") + time.sleep(3) + + # Expose the new port (this will automatically unbind the previous port) + exposed_2 = sandbox.expose_port(8081) + print(f"Port exposed: {exposed_2.port}") + print(f"Exposed at: {exposed_2.exposed_at}") + + # Wait a bit for the port to be ready + print("Waiting for port to be ready...") + time.sleep(2) + + # Make a request to verify the new port is working + print("\nMaking HTTP request to verify port 8081...") + try: + response = requests.get(f"{exposed_2.exposed_at}/test2.html", timeout=10) + response.raise_for_status() + print(f"✓ Request successful! Status: {response.status_code}") + print(f"✓ Response content: {response.text.strip()}") + except requests.RequestException as e: + print(f"⚠ Request failed: {e}") + print("Note: Port may still be propagating. Try again in a few seconds.") + + # Unexpose the port + print("\nUnexposing port...") + sandbox.unexpose_port() + print("Port unexposed") + + except Exception as e: + print(f"Error: {e}") + finally: + if sandbox: + sandbox.delete() + + +if __name__ == "__main__": + main() diff --git a/examples/14_expose_port_async.py b/examples/14_expose_port_async.py new file mode 100755 index 00000000..1abf79c7 --- /dev/null +++ b/examples/14_expose_port_async.py @@ -0,0 +1,129 @@ +#!/usr/bin/env python3 +"""Port exposure via TCP proxy (async variant)""" + +import asyncio +import os + +import requests + +from koyeb import AsyncSandbox + + +async def main(): + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + print("Error: KOYEB_API_TOKEN not set") + return + + sandbox = None + try: + sandbox = await AsyncSandbox.create( + image="koyeb/sandbox", + name="expose-port", + wait_ready=True, + api_token=api_token, + ) + + # Create a test file to serve + print("\nCreating test file...") + await sandbox.filesystem.write_file( + "/tmp/test.html", "

Hello from Sandbox!

Port 8080

" + ) + print("Test file created") + + # Start a simple HTTP server on port 8080 + print("\nStarting HTTP server on port 8080...") + process_id = await sandbox.launch_process( + "python3 -m http.server 8080", + cwd="/tmp", + ) + print(f"Server started with process ID: {process_id}") + + # Wait for server to start + print("Waiting for server to start...") + await asyncio.sleep(3) + + # Expose port 8080 + print("\nExposing port 8080...") + exposed = await sandbox.expose_port(8080) + print(f"Port exposed: {exposed.port}") + print(f"Exposed at: {exposed.exposed_at}") + + # Wait a bit for the port to be ready + print("Waiting for port to be ready...") + await asyncio.sleep(2) + + # Make a request to verify it's working + print("\nMaking HTTP request to verify port exposure...") + try: + loop = asyncio.get_running_loop() + response = await loop.run_in_executor( + None, requests.get, f"{exposed.exposed_at}/test.html" + ) + response.raise_for_status() + print(f"✓ Request successful! Status: {response.status_code}") + print(f"✓ Response content: {response.text.strip()}") + except Exception as e: + print(f"⚠ Request failed: {e}") + print("Note: Port may still be propagating. Try again in a few seconds.") + + # List processes to show the server is running + print("\nRunning processes:") + processes = await sandbox.list_processes() + for process in processes: + if process.get("status") == "running": + print( + f" {process.get('id')}: {process.get('cmd')} - {process.get('status')}" + ) + + # Switch to a different port (e.g., 8081) + print("\nSwitching to port 8081...") + # Create a different test file for port 8081 + await sandbox.filesystem.write_file( + "/tmp/test2.html", "

Hello from Sandbox!

Port 8081

" + ) + # Start a new server on 8081 + await sandbox.launch_process( + "python3 -m http.server 8081", + cwd="/tmp", + ) + print("Waiting for server to start...") + await asyncio.sleep(3) + + # Expose the new port (this will automatically unbind the previous port) + exposed_2 = await sandbox.expose_port(8081) + print(f"Port exposed: {exposed_2.port}") + print(f"Exposed at: {exposed_2.exposed_at}") + + # Wait a bit for the port to be ready + print("Waiting for port to be ready...") + await asyncio.sleep(2) + + # Make a request to verify the new port is working + print("\nMaking HTTP request to verify port 8081...") + try: + loop = asyncio.get_running_loop() + response = await loop.run_in_executor( + None, requests.get, f"{exposed_2.exposed_at}/test2.html" + ) + response.raise_for_status() + print(f"✓ Request successful! Status: {response.status_code}") + print(f"✓ Response content: {response.text.strip()}") + except Exception as e: + print(f"⚠ Request failed: {e}") + print("Note: Port may still be propagating. 
Try again in a few seconds.") + + # Unexpose the port + print("\nUnexposing port...") + await sandbox.unexpose_port() + print("Port unexposed") + + except Exception as e: + print(f"Error: {e}") + finally: + if sandbox: + await sandbox.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/koyeb/sandbox/sandbox.py b/koyeb/sandbox/sandbox.py index 247c1606..9a420765 100644 --- a/koyeb/sandbox/sandbox.py +++ b/koyeb/sandbox/sandbox.py @@ -568,13 +568,13 @@ def launch_process( client = SandboxClient(sandbox_url, self.sandbox_secret) try: response = client.start_process(cmd, cwd, env) - if not response.get("success", False): - error_msg = response.get("error", "Unknown error") - raise SandboxError(f"Failed to launch process: {error_msg}") + # Check for process ID - if it exists, the process was launched successfully process_id = response.get("id") - if not process_id: - raise SandboxError("Process launched but no process ID returned") - return process_id + if process_id: + return process_id + # If no ID, check for explicit error + error_msg = response.get("error", response.get("message", "Unknown error")) + raise SandboxError(f"Failed to launch process: {error_msg}") except Exception as e: if isinstance(e, SandboxError): raise From 63e0aa964b7f3753dc3508b3bf578f4a4469463a Mon Sep 17 00:00:00 2001 From: Edouard Bonlieu Date: Tue, 4 Nov 2025 23:05:11 +0100 Subject: [PATCH 33/47] improve readiness and code quality --- examples/README.md | 23 ++- koyeb/sandbox/__init__.py | 3 +- koyeb/sandbox/exec.py | 88 ++++++--- koyeb/sandbox/executor_client.py | 91 ++++++++- koyeb/sandbox/filesystem.py | 311 ++++++++++++++++++------------- koyeb/sandbox/sandbox.py | 244 +++++++++++++----------- koyeb/sandbox/utils.py | 144 +++++++++++++- 7 files changed, 626 insertions(+), 278 deletions(-) diff --git a/examples/README.md b/examples/README.md index 8be5a4a6..ec95e5ca 100644 --- a/examples/README.md +++ b/examples/README.md @@ -15,16 +15,19 @@ uv run python 
examples/01_create_sandbox.py ## Examples - **01_create_sandbox.py** - Create and manage sandbox instances -- **02_basic_commands.py** - Basic command execution -- **03_streaming_output.py** - Real-time streaming output -- **04_environment_variables.py** - Environment variable configuration -- **05_working_directory.py** - Working directory management -- **06_file_operations.py** - File read/write operations -- **07_directory_operations.py** - Directory management -- **08_binary_files.py** - Binary file handling -- **09_batch_operations.py** - Batch file operations -- **10_upload_download.py** - File upload and download -- **11_file_manipulation.py** - File manipulation operations +- **02_create_sandbox_with_timing.py** - Create sandbox with timing measurements +- **03_basic_commands.py** - Basic command execution +- **04_streaming_output.py** - Real-time streaming output +- **05_environment_variables.py** - Environment variable configuration +- **06_working_directory.py** - Working directory management +- **07_file_operations.py** - File read/write operations +- **08_directory_operations.py** - Directory management +- **09_binary_files.py** - Binary file handling +- **10_batch_operations.py** - Batch file operations +- **11_upload_download.py** - File upload and download +- **12_file_manipulation.py** - File manipulation operations +- **13_background_processes.py** - Background process management (launch, list, kill) +- **14_expose_port.py** - Port exposure via TCP proxy with HTTP verification ## Basic Usage diff --git a/koyeb/sandbox/__init__.py b/koyeb/sandbox/__init__.py index db75d7af..ac993c87 100644 --- a/koyeb/sandbox/__init__.py +++ b/koyeb/sandbox/__init__.py @@ -16,7 +16,7 @@ SandboxExecutor, ) from .filesystem import FileInfo, SandboxFilesystem -from .sandbox import AsyncSandbox, ExposedPort, Sandbox +from .sandbox import AsyncSandbox, ExposedPort, ProcessInfo, Sandbox from .utils import SandboxError __all__ = [ @@ -32,4 +32,5 @@ "CommandStatus", 
"SandboxCommandError", "ExposedPort", + "ProcessInfo", ] diff --git a/koyeb/sandbox/exec.py b/koyeb/sandbox/exec.py index a3058253..2ad4e786 100644 --- a/koyeb/sandbox/exec.py +++ b/koyeb/sandbox/exec.py @@ -11,10 +11,10 @@ import time from dataclasses import dataclass from enum import Enum -from typing import TYPE_CHECKING, Callable, Dict, List, Optional +from typing import TYPE_CHECKING, Any, AsyncIterator, Callable, Dict, List, Optional from .executor_client import SandboxClient -from .utils import SandboxError +from .utils import SandboxError, create_sandbox_client if TYPE_CHECKING: from .sandbox import Sandbox @@ -75,11 +75,9 @@ def _get_client(self) -> SandboxClient: """Get or create SandboxClient instance""" if self._client is None: sandbox_url = self.sandbox._get_sandbox_url() - if not sandbox_url: - raise SandboxError("Unable to get sandbox URL") - if not self.sandbox.sandbox_secret: - raise SandboxError("Sandbox secret not available") - self._client = SandboxClient(sandbox_url, self.sandbox.sandbox_secret) + self._client = create_sandbox_client( + sandbox_url, self.sandbox.sandbox_secret + ) return self._client def __call__( @@ -98,7 +96,7 @@ def __call__( command: Command to execute as a string (e.g., "python -c 'print(2+2)'") cwd: Working directory for the command env: Environment variables for the command - timeout: Command timeout in seconds (not currently enforced, reserved for future use) + timeout: Command timeout in seconds (enforced for HTTP requests) on_stdout: Optional callback for streaming stdout chunks on_stderr: Optional callback for streaming stderr chunks @@ -128,7 +126,9 @@ def __call__( try: client = self._get_client() - for event in client.run_streaming(cmd=command, cwd=cwd, env=env): + for event in client.run_streaming( + cmd=command, cwd=cwd, env=env, timeout=float(timeout) + ): if "stream" in event: stream_type = event["stream"] data = event["data"] @@ -179,7 +179,7 @@ def __call__( # Use regular run for non-streaming execution try: 
client = self._get_client() - response = client.run(cmd=command, cwd=cwd, env=env) + response = client.run(cmd=command, cwd=cwd, env=env, timeout=float(timeout)) stdout = response.get("stdout", "") stderr = response.get("stderr", "") @@ -230,7 +230,7 @@ async def __call__( command: Command to execute as a string (e.g., "python -c 'print(2+2)'") cwd: Working directory for the command env: Environment variables for the command - timeout: Command timeout in seconds (not currently enforced, reserved for future use) + timeout: Command timeout in seconds (enforced for HTTP requests) on_stdout: Optional callback for streaming stdout chunks on_stderr: Optional callback for streaming stderr chunks @@ -260,18 +260,57 @@ async def __call__( try: client = self._get_client() - # Run streaming in executor to avoid blocking - loop = asyncio.get_running_loop() - - def stream_command(): - events = [] - for event in client.run_streaming(cmd=command, cwd=cwd, env=env): - events.append(event) - return events - events = await loop.run_in_executor(None, stream_command) - - for event in events: + # Create async generator for streaming events + async def stream_events() -> AsyncIterator[Dict[str, Any]]: + """Async generator that yields events as they arrive.""" + import queue + from threading import Thread + + event_queue: queue.Queue[Dict[str, Any] | None] = queue.Queue() + done = False + + def sync_stream(): + """Synchronous generator for streaming.""" + nonlocal done + try: + for event in client.run_streaming( + cmd=command, cwd=cwd, env=env, timeout=float(timeout) + ): + event_queue.put(event) + event_queue.put(None) # Sentinel + except Exception as e: + event_queue.put({"error": str(e)}) + event_queue.put(None) + finally: + done = True + + # Start streaming in a thread + thread = Thread(target=sync_stream, daemon=True) + thread.start() + + # Yield events as they arrive + while True: + try: + # Use get_nowait to avoid blocking in executor + event = event_queue.get_nowait() + if event 
is None: + # Sentinel received, streaming is complete + break + yield event + except queue.Empty: + # Check if thread is done and queue is empty + if done and event_queue.empty(): + break + # Wait a bit before checking again + await asyncio.sleep(0.01) + continue + + # Wait for thread to complete (should be done by now) + thread.join(timeout=1.0) + + # Process events as they arrive + async for event in stream_events(): if "stream" in event: stream_type = event["stream"] data = event["data"] @@ -325,7 +364,10 @@ def stream_command(): try: client = self._get_client() response = await loop.run_in_executor( - None, lambda: client.run(cmd=command, cwd=cwd, env=env) + None, + lambda: client.run( + cmd=command, cwd=cwd, env=env, timeout=float(timeout) + ), ) stdout = response.get("stdout", "") diff --git a/koyeb/sandbox/executor_client.py b/koyeb/sandbox/executor_client.py index 85c2b149..feb71eab 100644 --- a/koyeb/sandbox/executor_client.py +++ b/koyeb/sandbox/executor_client.py @@ -5,29 +5,61 @@ """ import json +import logging import time from typing import Any, Dict, Iterator, Optional import requests +from .utils import DEFAULT_HTTP_TIMEOUT + +logger = logging.getLogger(__name__) + class SandboxClient: """Client for the Sandbox Executor API.""" - def __init__(self, base_url: str, secret: str): + def __init__( + self, base_url: str, secret: str, timeout: float = DEFAULT_HTTP_TIMEOUT + ): """ Initialize the Sandbox Client. 
Args: base_url: The base URL of the sandbox server (e.g., 'http://localhost:8080') secret: The authentication secret/token + timeout: Request timeout in seconds (default: 30) """ self.base_url = base_url.rstrip("/") self.secret = secret + self.timeout = timeout self.headers = { "Authorization": f"Bearer {secret}", "Content-Type": "application/json", } + # Use session for connection pooling + self._session = requests.Session() + self._session.headers.update(self.headers) + self._closed = False + + def close(self) -> None: + """Close the HTTP session and release resources.""" + if not self._closed and hasattr(self, "_session"): + self._session.close() + self._closed = True + + def __enter__(self): + """Context manager entry - returns self.""" + return self + + def __exit__(self, exc_type, exc_val, exc_tb) -> None: + """Context manager exit - automatically closes the session.""" + self.close() + + def __del__(self): + """Clean up session on deletion (fallback, not guaranteed to run).""" + if not self._closed: + self.close() def _request_with_retry( self, @@ -56,12 +88,20 @@ def _request_with_retry( backoff = initial_backoff last_exception = None + # Set default timeout if not provided + if "timeout" not in kwargs: + kwargs["timeout"] = self.timeout + for attempt in range(max_retries + 1): try: - response = requests.request(method, url, **kwargs) + # Use session for connection pooling + response = self._session.request(method, url, **kwargs) # If we get a 503, retry with backoff if response.status_code == 503 and attempt < max_retries: + logger.debug( + f"Received 503 error, retrying... 
(attempt {attempt + 1}/{max_retries + 1})" + ) time.sleep(backoff) backoff *= 2 # Exponential backoff continue @@ -70,12 +110,25 @@ def _request_with_retry( return response except requests.HTTPError as e: - if e.response.status_code == 503 and attempt < max_retries: + if ( + e.response + and e.response.status_code == 503 + and attempt < max_retries + ): + logger.debug( + f"Received 503 error, retrying... (attempt {attempt + 1}/{max_retries + 1})" + ) time.sleep(backoff) backoff *= 2 last_exception = e continue raise + except requests.Timeout as e: + logger.warning(f"Request timeout after {self.timeout}s: {e}") + raise + except requests.RequestException as e: + logger.warning(f"Request failed: {e}") + raise # If we exhausted all retries, raise the last exception if last_exception: @@ -87,13 +140,21 @@ def health(self) -> Dict[str, str]: Returns: Dict with status information + + Raises: + requests.HTTPError: If the health check fails """ - response = requests.get(f"{self.base_url}/health") - response.raise_for_status() + response = self._request_with_retry( + "GET", f"{self.base_url}/health", timeout=self.timeout + ) return response.json() def run( - self, cmd: str, cwd: Optional[str] = None, env: Optional[Dict[str, str]] = None + self, + cmd: str, + cwd: Optional[str] = None, + env: Optional[Dict[str, str]] = None, + timeout: Optional[float] = None, ) -> Dict[str, Any]: """ Execute a shell command in the sandbox. 
@@ -102,6 +163,7 @@ def run( cmd: The shell command to execute cwd: Optional working directory for command execution env: Optional environment variables to set/override + timeout: Optional timeout in seconds for the request Returns: Dict containing stdout, stderr, error (if any), and exit code @@ -112,13 +174,22 @@ def run( if env is not None: payload["env"] = env + request_timeout = timeout if timeout is not None else self.timeout response = self._request_with_retry( - "POST", f"{self.base_url}/run", json=payload, headers=self.headers + "POST", + f"{self.base_url}/run", + json=payload, + headers=self.headers, + timeout=request_timeout, ) return response.json() def run_streaming( - self, cmd: str, cwd: Optional[str] = None, env: Optional[Dict[str, str]] = None + self, + cmd: str, + cwd: Optional[str] = None, + env: Optional[Dict[str, str]] = None, + timeout: Optional[float] = None, ) -> Iterator[Dict[str, Any]]: """ Execute a shell command in the sandbox and stream the output in real-time. 
@@ -131,6 +202,7 @@ def run_streaming( cmd: The shell command to execute cwd: Optional working directory for command execution env: Optional environment variables to set/override + timeout: Optional timeout in seconds for the streaming request Yields: Dict events with the following types: @@ -158,11 +230,12 @@ def run_streaming( if env is not None: payload["env"] = env - response = requests.post( + response = self._session.post( f"{self.base_url}/run_streaming", json=payload, headers=self.headers, stream=True, + timeout=timeout if timeout is not None else self.timeout, ) response.raise_for_status() diff --git a/koyeb/sandbox/filesystem.py b/koyeb/sandbox/filesystem.py index d5320ac6..873d429a 100644 --- a/koyeb/sandbox/filesystem.py +++ b/koyeb/sandbox/filesystem.py @@ -7,13 +7,18 @@ from __future__ import annotations -import asyncio import os from dataclasses import dataclass from typing import TYPE_CHECKING, Dict, List, Union from .executor_client import SandboxClient -from .utils import SandboxError +from .utils import ( + SandboxError, + check_error_message, + create_sandbox_client, + escape_shell_arg, + run_sync_in_executor, +) if TYPE_CHECKING: from .exec import SandboxExecutor @@ -24,11 +29,11 @@ class SandboxFilesystemError(SandboxError): """Base exception for filesystem operations""" -class FileNotFoundError(SandboxFilesystemError): +class SandboxFileNotFoundError(SandboxFilesystemError): """Raised when file or directory not found""" -class FileExistsError(SandboxFilesystemError): +class SandboxFileExistsError(SandboxFilesystemError): """Raised when file already exists""" @@ -57,11 +62,9 @@ def _get_client(self) -> SandboxClient: """Get or create SandboxClient instance""" if self._client is None: sandbox_url = self.sandbox._get_sandbox_url() - if not sandbox_url: - raise SandboxError("Unable to get sandbox URL") - if not self.sandbox.sandbox_secret: - raise SandboxError("Sandbox secret not available") - self._client = SandboxClient(sandbox_url, 
self.sandbox.sandbox_secret) + self._client = create_sandbox_client( + sandbox_url, self.sandbox.sandbox_secret + ) return self._client def _get_executor(self) -> "SandboxExecutor": @@ -91,9 +94,14 @@ def write_file( content_str = content try: - client.write_file(path, content_str) + response = client.write_file(path, content_str) + if response.get("error"): + error_msg = response.get("error", "Unknown error") + raise SandboxFilesystemError(f"Failed to write file: {error_msg}") except Exception as e: - raise SandboxFilesystemError(f"Failed to write file: {str(e)}") + if isinstance(e, SandboxFilesystemError): + raise + raise SandboxFilesystemError(f"Failed to write file: {str(e)}") from e def read_file(self, path: str, encoding: str = "utf-8") -> FileInfo: """ @@ -110,13 +118,20 @@ def read_file(self, path: str, encoding: str = "utf-8") -> FileInfo: try: response = client.read_file(path) + if response.get("error"): + error_msg = response.get("error", "Unknown error") + if check_error_message(error_msg, "NO_SUCH_FILE"): + raise SandboxFileNotFoundError(f"File not found: {path}") + raise SandboxFilesystemError(f"Failed to read file: {error_msg}") content = response.get("content", "") return FileInfo(content=content, encoding=encoding) + except (SandboxFileNotFoundError, SandboxFilesystemError): + raise except Exception as e: error_msg = str(e) - if "not found" in error_msg.lower(): - raise FileNotFoundError(f"File not found: {path}") - raise SandboxFilesystemError(f"Failed to read file: {error_msg}") + if check_error_message(error_msg, "NO_SUCH_FILE"): + raise SandboxFileNotFoundError(f"File not found: {path}") from e + raise SandboxFilesystemError(f"Failed to read file: {error_msg}") from e def mkdir(self, path: str, recursive: bool = False) -> None: """ @@ -129,12 +144,21 @@ def mkdir(self, path: str, recursive: bool = False) -> None: client = self._get_client() try: - client.make_dir(path) + response = client.make_dir(path) + if response.get("error"): + error_msg = 
response.get("error", "Unknown error") + if check_error_message(error_msg, "FILE_EXISTS"): + raise SandboxFileExistsError(f"Directory already exists: {path}") + raise SandboxFilesystemError(f"Failed to create directory: {error_msg}") + except (SandboxFileExistsError, SandboxFilesystemError): + raise except Exception as e: error_msg = str(e) - if "exists" in error_msg.lower(): - raise FileExistsError(f"Directory already exists: {path}") - raise SandboxFilesystemError(f"Failed to create directory: {error_msg}") + if check_error_message(error_msg, "FILE_EXISTS"): + raise SandboxFileExistsError(f"Directory already exists: {path}") from e + raise SandboxFilesystemError( + f"Failed to create directory: {error_msg}" + ) from e def list_dir(self, path: str = ".") -> List[str]: """ @@ -150,13 +174,22 @@ def list_dir(self, path: str = ".") -> List[str]: try: response = client.list_dir(path) + if response.get("error"): + error_msg = response.get("error", "Unknown error") + if check_error_message(error_msg, "NO_SUCH_FILE"): + raise SandboxFileNotFoundError(f"Directory not found: {path}") + raise SandboxFilesystemError(f"Failed to list directory: {error_msg}") entries = response.get("entries", []) return entries + except (SandboxFileNotFoundError, SandboxFilesystemError): + raise except Exception as e: error_msg = str(e) - if "not found" in error_msg.lower(): - raise FileNotFoundError(f"Directory not found: {path}") - raise SandboxFilesystemError(f"Failed to list directory: {error_msg}") + if check_error_message(error_msg, "NO_SUCH_FILE"): + raise SandboxFileNotFoundError(f"Directory not found: {path}") from e + raise SandboxFilesystemError( + f"Failed to list directory: {error_msg}" + ) from e def delete_file(self, path: str) -> None: """ @@ -168,12 +201,19 @@ def delete_file(self, path: str) -> None: client = self._get_client() try: - client.delete_file(path) + response = client.delete_file(path) + if response.get("error"): + error_msg = response.get("error", "Unknown error") 
+ if check_error_message(error_msg, "NO_SUCH_FILE"): + raise SandboxFileNotFoundError(f"File not found: {path}") + raise SandboxFilesystemError(f"Failed to delete file: {error_msg}") + except (SandboxFileNotFoundError, SandboxFilesystemError): + raise except Exception as e: error_msg = str(e) - if "not found" in error_msg.lower(): - raise FileNotFoundError(f"File not found: {path}") - raise SandboxFilesystemError(f"Failed to delete file: {error_msg}") + if check_error_message(error_msg, "NO_SUCH_FILE"): + raise SandboxFileNotFoundError(f"File not found: {path}") from e + raise SandboxFilesystemError(f"Failed to delete file: {error_msg}") from e def delete_dir(self, path: str) -> None: """ @@ -185,14 +225,25 @@ def delete_dir(self, path: str) -> None: client = self._get_client() try: - client.delete_dir(path) + response = client.delete_dir(path) + if response.get("error"): + error_msg = response.get("error", "Unknown error") + if check_error_message(error_msg, "NO_SUCH_FILE"): + raise SandboxFileNotFoundError(f"Directory not found: {path}") + if check_error_message(error_msg, "DIR_NOT_EMPTY"): + raise SandboxFilesystemError(f"Directory not empty: {path}") + raise SandboxFilesystemError(f"Failed to delete directory: {error_msg}") + except (SandboxFileNotFoundError, SandboxFilesystemError): + raise except Exception as e: error_msg = str(e) - if "not found" in error_msg.lower(): - raise FileNotFoundError(f"Directory not found: {path}") - if "not empty" in error_msg.lower(): - raise SandboxFilesystemError(f"Directory not empty: {path}") - raise SandboxFilesystemError(f"Failed to delete directory: {error_msg}") + if check_error_message(error_msg, "NO_SUCH_FILE"): + raise SandboxFileNotFoundError(f"Directory not found: {path}") from e + if check_error_message(error_msg, "DIR_NOT_EMPTY"): + raise SandboxFilesystemError(f"Directory not empty: {path}") from e + raise SandboxFilesystemError( + f"Failed to delete directory: {error_msg}" + ) from e def rename_file(self, 
old_path: str, new_path: str) -> None: """ @@ -203,12 +254,15 @@ def rename_file(self, old_path: str, new_path: str) -> None: new_path: New file path """ # Use exec since there's no direct rename in SandboxClient + # Properly escape paths to prevent shell injection executor = self._get_executor() - result = executor(f"mv {old_path} {new_path}") + old_path_escaped = escape_shell_arg(old_path) + new_path_escaped = escape_shell_arg(new_path) + result = executor(f"mv {old_path_escaped} {new_path_escaped}") if not result.success: - if "No such file" in result.stderr: - raise FileNotFoundError(f"File not found: {old_path}") + if check_error_message(result.stderr, "NO_SUCH_FILE"): + raise SandboxFileNotFoundError(f"File not found: {old_path}") raise SandboxFilesystemError(f"Failed to rename file: {result.stderr}") def move_file(self, source_path: str, destination_path: str) -> None: @@ -220,12 +274,15 @@ def move_file(self, source_path: str, destination_path: str) -> None: destination_path: Destination path """ # Use exec since there's no direct move in SandboxClient + # Properly escape paths to prevent shell injection executor = self._get_executor() - result = executor(f"mv {source_path} {destination_path}") + source_path_escaped = escape_shell_arg(source_path) + destination_path_escaped = escape_shell_arg(destination_path) + result = executor(f"mv {source_path_escaped} {destination_path_escaped}") if not result.success: - if "No such file" in result.stderr: - raise FileNotFoundError(f"File not found: {source_path}") + if check_error_message(result.stderr, "NO_SUCH_FILE"): + raise SandboxFileNotFoundError(f"File not found: {source_path}") raise SandboxFilesystemError(f"Failed to move file: {result.stderr}") def write_files(self, files: List[Dict[str, str]]) -> None: @@ -244,50 +301,88 @@ def write_files(self, files: List[Dict[str, str]]) -> None: def exists(self, path: str) -> bool: """Check if file/directory exists synchronously""" executor = self._get_executor() - 
result = executor(f"test -e {path}") + path_escaped = escape_shell_arg(path) + result = executor(f"test -e {path_escaped}") return result.success def is_file(self, path: str) -> bool: """Check if path is a file synchronously""" executor = self._get_executor() - result = executor(f"test -f {path}") + path_escaped = escape_shell_arg(path) + result = executor(f"test -f {path_escaped}") return result.success def is_dir(self, path: str) -> bool: """Check if path is a directory synchronously""" executor = self._get_executor() - result = executor(f"test -d {path}") + path_escaped = escape_shell_arg(path) + result = executor(f"test -d {path_escaped}") return result.success - def upload_file(self, local_path: str, remote_path: str) -> None: + def upload_file( + self, local_path: str, remote_path: str, encoding: str = "utf-8" + ) -> None: """ Upload a local file to the sandbox synchronously. Args: local_path: Path to the local file remote_path: Destination path in the sandbox + encoding: File encoding (default: "utf-8"). Use "base64" for binary files. 
+ + Raises: + SandboxFileNotFoundError: If local file doesn't exist + UnicodeDecodeError: If file cannot be decoded with specified encoding """ if not os.path.exists(local_path): - raise FileNotFoundError(f"Local file not found: {local_path}") + raise SandboxFileNotFoundError(f"Local file not found: {local_path}") with open(local_path, "rb") as f: - content = f.read().decode("utf-8") + content_bytes = f.read() - self.write_file(remote_path, content) + if encoding == "base64": + import base64 - def download_file(self, remote_path: str, local_path: str) -> None: + content = base64.b64encode(content_bytes).decode("ascii") + self.write_file(remote_path, content, encoding="base64") + else: + try: + content = content_bytes.decode(encoding) + self.write_file(remote_path, content, encoding=encoding) + except UnicodeDecodeError as e: + raise UnicodeDecodeError( + e.encoding, + e.object, + e.start, + e.end, + f"Cannot decode file as {encoding}. Use encoding='base64' for binary files.", + ) from e + + def download_file( + self, remote_path: str, local_path: str, encoding: str = "utf-8" + ) -> None: """ Download a file from the sandbox to a local path synchronously. Args: remote_path: Path to the file in the sandbox local_path: Destination path on the local filesystem + encoding: File encoding (default: "utf-8"). Use "base64" for binary files. 
+ + Raises: + SandboxFileNotFoundError: If remote file doesn't exist """ - file_info = self.read_file(remote_path) - content = file_info.content.encode("utf-8") + file_info = self.read_file(remote_path, encoding=encoding) + + if encoding == "base64": + import base64 + + content_bytes = base64.b64decode(file_info.content) + else: + content_bytes = file_info.content.encode(encoding) with open(local_path, "wb") as f: - f.write(content) + f.write(content_bytes) def ls(self, path: str = ".") -> List[str]: """ @@ -310,15 +405,16 @@ def rm(self, path: str, recursive: bool = False) -> None: recursive: Remove recursively """ executor = self._get_executor() + path_escaped = escape_shell_arg(path) if recursive: - result = executor(f"rm -rf {path}") + result = executor(f"rm -rf {path_escaped}") else: - result = executor(f"rm {path}") + result = executor(f"rm {path_escaped}") if not result.success: - if "No such file or directory" in result.stderr: - raise FileNotFoundError(f"File not found: {path}") + if check_error_message(result.stderr, "NO_SUCH_FILE"): + raise SandboxFileNotFoundError(f"File not found: {path}") raise SandboxFilesystemError(f"Failed to remove: {result.stderr}") def open(self, path: str, mode: str = "r") -> SandboxFileIO: @@ -341,6 +437,20 @@ class AsyncSandboxFilesystem(SandboxFilesystem): Inherits from SandboxFilesystem and provides async methods. """ + def _run_sync(self, method, *args, **kwargs): + """ + Helper method to run a synchronous method in an executor. + + Args: + method: The sync method to run (from super()) + *args: Positional arguments for the method + **kwargs: Keyword arguments for the method + + Returns: + Result of the synchronous method call + """ + return run_sync_in_executor(method, *args, **kwargs) + async def write_file( self, path: str, content: Union[str, bytes], encoding: str = "utf-8" ) -> None: @@ -352,13 +462,7 @@ async def write_file( content: Content to write (string or bytes) encoding: File encoding (default: "utf-8"). 
Use "base64" for binary data. """ - loop = asyncio.get_running_loop() - await loop.run_in_executor( - None, - lambda: super(AsyncSandboxFilesystem, self).write_file( - path, content, encoding - ), - ) + await self._run_sync(super().write_file, path, content, encoding) async def read_file(self, path: str, encoding: str = "utf-8") -> FileInfo: """ @@ -371,10 +475,7 @@ async def read_file(self, path: str, encoding: str = "utf-8") -> FileInfo: Returns: FileInfo: Object with content and encoding """ - loop = asyncio.get_running_loop() - return await loop.run_in_executor( - None, lambda: super(AsyncSandboxFilesystem, self).read_file(path, encoding) - ) + return await self._run_sync(super().read_file, path, encoding) async def mkdir(self, path: str, recursive: bool = False) -> None: """ @@ -384,10 +485,7 @@ async def mkdir(self, path: str, recursive: bool = False) -> None: path: Absolute path to the directory recursive: Create parent directories if needed (default: False, not used - API always creates parents) """ - loop = asyncio.get_running_loop() - await loop.run_in_executor( - None, lambda: super(AsyncSandboxFilesystem, self).mkdir(path, recursive) - ) + await self._run_sync(super().mkdir, path, recursive) async def list_dir(self, path: str = ".") -> List[str]: """ @@ -399,10 +497,7 @@ async def list_dir(self, path: str = ".") -> List[str]: Returns: List[str]: Names of files and directories within the specified path. 
""" - loop = asyncio.get_running_loop() - return await loop.run_in_executor( - None, lambda: super(AsyncSandboxFilesystem, self).list_dir(path) - ) + return await self._run_sync(super().list_dir, path) async def delete_file(self, path: str) -> None: """ @@ -411,10 +506,7 @@ async def delete_file(self, path: str) -> None: Args: path: Absolute path to the file """ - loop = asyncio.get_running_loop() - await loop.run_in_executor( - None, lambda: super(AsyncSandboxFilesystem, self).delete_file(path) - ) + await self._run_sync(super().delete_file, path) async def delete_dir(self, path: str) -> None: """ @@ -423,10 +515,7 @@ async def delete_dir(self, path: str) -> None: Args: path: Absolute path to the directory """ - loop = asyncio.get_running_loop() - await loop.run_in_executor( - None, lambda: super(AsyncSandboxFilesystem, self).delete_dir(path) - ) + await self._run_sync(super().delete_dir, path) async def rename_file(self, old_path: str, new_path: str) -> None: """ @@ -436,11 +525,7 @@ async def rename_file(self, old_path: str, new_path: str) -> None: old_path: Current file path new_path: New file path """ - loop = asyncio.get_running_loop() - await loop.run_in_executor( - None, - lambda: super(AsyncSandboxFilesystem, self).rename_file(old_path, new_path), - ) + await self._run_sync(super().rename_file, old_path, new_path) async def move_file(self, source_path: str, destination_path: str) -> None: """ @@ -450,13 +535,7 @@ async def move_file(self, source_path: str, destination_path: str) -> None: source_path: Current file path destination_path: Destination path """ - loop = asyncio.get_running_loop() - await loop.run_in_executor( - None, - lambda: super(AsyncSandboxFilesystem, self).move_file( - source_path, destination_path - ), - ) + await self._run_sync(super().move_file, source_path, destination_path) async def write_files(self, files: List[Dict[str, str]]) -> None: """ @@ -473,56 +552,41 @@ async def write_files(self, files: List[Dict[str, str]]) -> None: 
async def exists(self, path: str) -> bool: """Check if file/directory exists asynchronously""" - loop = asyncio.get_running_loop() - return await loop.run_in_executor( - None, lambda: super(AsyncSandboxFilesystem, self).exists(path) - ) + return await self._run_sync(super().exists, path) async def is_file(self, path: str) -> bool: """Check if path is a file asynchronously""" - loop = asyncio.get_running_loop() - return await loop.run_in_executor( - None, lambda: super(AsyncSandboxFilesystem, self).is_file(path) - ) + return await self._run_sync(super().is_file, path) async def is_dir(self, path: str) -> bool: """Check if path is a directory asynchronously""" - loop = asyncio.get_running_loop() - return await loop.run_in_executor( - None, lambda: super(AsyncSandboxFilesystem, self).is_dir(path) - ) + return await self._run_sync(super().is_dir, path) - async def upload_file(self, local_path: str, remote_path: str) -> None: + async def upload_file( + self, local_path: str, remote_path: str, encoding: str = "utf-8" + ) -> None: """ Upload a local file to the sandbox asynchronously. Args: local_path: Path to the local file remote_path: Destination path in the sandbox + encoding: File encoding (default: "utf-8"). Use "base64" for binary files. """ - loop = asyncio.get_running_loop() - await loop.run_in_executor( - None, - lambda: super(AsyncSandboxFilesystem, self).upload_file( - local_path, remote_path - ), - ) + await self._run_sync(super().upload_file, local_path, remote_path, encoding) - async def download_file(self, remote_path: str, local_path: str) -> None: + async def download_file( + self, remote_path: str, local_path: str, encoding: str = "utf-8" + ) -> None: """ Download a file from the sandbox to a local path asynchronously. Args: remote_path: Path to the file in the sandbox local_path: Destination path on the local filesystem + encoding: File encoding (default: "utf-8"). Use "base64" for binary files. 
""" - loop = asyncio.get_running_loop() - await loop.run_in_executor( - None, - lambda: super(AsyncSandboxFilesystem, self).download_file( - remote_path, local_path - ), - ) + await self._run_sync(super().download_file, remote_path, local_path, encoding) async def ls(self, path: str = ".") -> List[str]: """ @@ -544,10 +608,7 @@ async def rm(self, path: str, recursive: bool = False) -> None: path: Path to remove recursive: Remove recursively """ - loop = asyncio.get_running_loop() - await loop.run_in_executor( - None, lambda: super(AsyncSandboxFilesystem, self).rm(path, recursive) - ) + await self._run_sync(super().rm, path, recursive) def open(self, path: str, mode: str = "r") -> AsyncSandboxFileIO: """ @@ -595,7 +656,7 @@ def write(self, content: str) -> None: try: existing = self.filesystem.read_file(self.path) content = existing.content + content - except FileNotFoundError: + except SandboxFileNotFoundError: pass self.filesystem.write_file(self.path, content) @@ -643,7 +704,7 @@ async def write(self, content: str) -> None: try: existing = await self.filesystem.read_file(self.path) content = existing.content + content - except FileNotFoundError: + except SandboxFileNotFoundError: pass await self.filesystem.write_file(self.path, content) diff --git a/koyeb/sandbox/sandbox.py b/koyeb/sandbox/sandbox.py index 9a420765..5716cbcc 100644 --- a/koyeb/sandbox/sandbox.py +++ b/koyeb/sandbox/sandbox.py @@ -11,26 +11,48 @@ import secrets import time from dataclasses import dataclass -from typing import TYPE_CHECKING, Any, Dict, List, Optional +from typing import TYPE_CHECKING, Dict, List, Optional, TypedDict +from koyeb.api.api.deployments_api import DeploymentsApi from koyeb.api.models.create_app import CreateApp +from koyeb.api.models.create_service import CreateService from .utils import ( + DEFAULT_INSTANCE_WAIT_TIMEOUT, + DEFAULT_POLL_INTERVAL, IdleTimeout, SandboxError, + _is_light_sleep_enabled, build_env_vars, create_deployment_definition, create_docker_source, 
create_koyeb_sandbox_routes, + create_sandbox_client, get_api_client, is_sandbox_healthy, + logger, + run_sync_in_executor, + validate_port, ) if TYPE_CHECKING: from .exec import AsyncSandboxExecutor, SandboxExecutor + from .executor_client import SandboxClient from .filesystem import AsyncSandboxFilesystem, SandboxFilesystem +class ProcessInfo(TypedDict, total=False): + """Type definition for process information returned by list_processes.""" + + id: str # Process ID (UUID string) + cmd: str # The command that was executed + status: str # Process status (e.g., "running", "completed") + pid: int # OS process ID (if running) + exit_code: int # Exit code (if completed) + started_at: str # ISO 8601 timestamp when process started + completed_at: str # ISO 8601 timestamp when process completed (if applicable) + + @dataclass class ExposedPort: """Result of exposing a port via TCP proxy.""" @@ -67,6 +89,7 @@ def __init__( self.sandbox_secret = sandbox_secret self._created_at = time.time() self._sandbox_url = None + self._client = None @classmethod def create( @@ -165,8 +188,6 @@ def _create_sync( env["SANDBOX_SECRET"] = sandbox_secret # Check if light sleep is enabled for this instance type - from .utils import _is_light_sleep_enabled - light_sleep_enabled = _is_light_sleep_enabled( instance_type, catalog_instances_api ) @@ -190,15 +211,11 @@ def _create_sync( enable_tcp_proxy=enable_tcp_proxy, ) - from koyeb.api.models.create_service import CreateService - create_service = CreateService(app_id=app_id, definition=deployment_definition) service_response = services_api.create_service(service=create_service) service_id = service_response.service.id deployment_id = service_response.service.latest_deployment_id - from koyeb.api.api.deployments_api import DeploymentsApi - deployments_api = DeploymentsApi(services_api.api_client) max_wait = min(timeout // 2, 60) if timeout > 60 else timeout @@ -215,15 +232,15 @@ def _create_sync( instance_id = 
scaling_response.replicas[0].instances[0].id break else: - print( + logger.debug( f"Waiting for instances to be created... (elapsed: {time.time() - start_time:.1f}s)" ) time.sleep(wait_interval) except Exception as e: - print(f"Error getting deployment scaling: {e}") + logger.warning(f"Error getting deployment scaling: {e}") time.sleep(wait_interval) else: - raise Exception( + raise SandboxError( f"No instances found in deployment after {max_wait} seconds" ) @@ -237,7 +254,11 @@ def _create_sync( sandbox_secret=sandbox_secret, ) - def wait_ready(self, timeout: int = 60, poll_interval: float = 2.0) -> bool: + def wait_ready( + self, + timeout: int = DEFAULT_INSTANCE_WAIT_TIMEOUT, + poll_interval: float = DEFAULT_POLL_INTERVAL, + ) -> bool: """ Wait for sandbox to become ready with proper polling. @@ -275,7 +296,9 @@ def wait_ready(self, timeout: int = 60, poll_interval: float = 2.0) -> bool: return False def wait_tcp_proxy_ready( - self, timeout: int = 60, poll_interval: float = 2.0 + self, + timeout: int = DEFAULT_INSTANCE_WAIT_TIMEOUT, + poll_interval: float = DEFAULT_POLL_INTERVAL, ) -> bool: """ Wait for TCP proxy to become ready and available. @@ -361,8 +384,6 @@ def get_tcp_proxy_info(self) -> Optional[tuple[str, int]]: return None # Get the active deployment - from koyeb.api.api.deployments_api import DeploymentsApi - deployments_api = DeploymentsApi() deployments_api.api_client = services_api.api_client deployment_response = deployments_api.get_deployment( @@ -400,6 +421,36 @@ def _get_sandbox_url(self) -> Optional[str]: self._sandbox_url = f"https://{domain}/koyeb-sandbox" return self._sandbox_url + def _get_client(self) -> "SandboxClient": # type: ignore[name-defined] + """ + Get or create SandboxClient instance with validation. 
+ + Returns: + SandboxClient: Configured client instance + + Raises: + SandboxError: If sandbox URL or secret is not available + """ + if self._client is None: + sandbox_url = self._get_sandbox_url() + self._client = create_sandbox_client(sandbox_url, self.sandbox_secret) + return self._client + + def _check_response_error(self, response: Dict, operation: str) -> None: + """ + Check if a response indicates an error and raise SandboxError if so. + + Args: + response: The response dictionary to check + operation: Description of the operation (e.g., "expose port 8080") + + Raises: + SandboxError: If response indicates failure + """ + if not response.get("success", False): + error_msg = response.get("error", "Unknown error") + raise SandboxError(f"Failed to {operation}: {error_msg}") + def status(self) -> str: """Get current sandbox status""" from .utils import get_sandbox_status @@ -440,7 +491,7 @@ def expose_port(self, port: int) -> ExposedPort: Automatically unbinds any existing port before binding the new one. 
Args: - port: The internal port number to expose (must be a valid port number) + port: The internal port number to expose (must be a valid port number between 1 and 65535) Returns: ExposedPort: An object with `port` and `exposed_at` attributes: @@ -448,6 +499,7 @@ def expose_port(self, port: int) -> ExposedPort: - exposed_at: The full URL with https:// protocol (e.g., "https://app-name-org.koyeb.app") Raises: + ValueError: If port is not in valid range [1, 65535] SandboxError: If the port binding operation fails Notes: @@ -463,28 +515,20 @@ def expose_port(self, port: int) -> ExposedPort: >>> result.exposed_at 'https://app-name-org.koyeb.app' """ - from .executor_client import SandboxClient - - sandbox_url = self._get_sandbox_url() - if not sandbox_url: - raise SandboxError("Unable to get sandbox URL") - if not self.sandbox_secret: - raise SandboxError("Sandbox secret not available") - - client = SandboxClient(sandbox_url, self.sandbox_secret) + validate_port(port) + client = self._get_client() try: # Always unbind any existing port first try: client.unbind_port() - except Exception: + except Exception as e: # Ignore errors when unbinding - it's okay if no port was bound + logger.debug(f"Error unbinding existing port (this is okay): {e}") pass # Now bind the new port response = client.bind_port(port) - if not response.get("success", False): - error_msg = response.get("error", "Unknown error") - raise SandboxError(f"Failed to expose port {port}: {error_msg}") + self._check_response_error(response, f"expose port {port}") # Get domain for exposed_at domain = self.get_domain() @@ -514,20 +558,10 @@ def unexpose_port(self) -> None: - After unexposing, the TCP proxy will no longer forward traffic - Safe to call even if no port is currently bound """ - from .executor_client import SandboxClient - - sandbox_url = self._get_sandbox_url() - if not sandbox_url: - raise SandboxError("Unable to get sandbox URL") - if not self.sandbox_secret: - raise SandboxError("Sandbox secret 
not available") - - client = SandboxClient(sandbox_url, self.sandbox_secret) + client = self._get_client() try: response = client.unbind_port() - if not response.get("success", False): - error_msg = response.get("error", "Unknown error") - raise SandboxError(f"Failed to unexpose port: {error_msg}") + self._check_response_error(response, "unexpose port") except Exception as e: if isinstance(e, SandboxError): raise @@ -557,15 +591,7 @@ def launch_process( >>> process_id = sandbox.launch_process("python -u server.py") >>> print(f"Started process: {process_id}") """ - from .executor_client import SandboxClient - - sandbox_url = self._get_sandbox_url() - if not sandbox_url: - raise SandboxError("Unable to get sandbox URL") - if not self.sandbox_secret: - raise SandboxError("Sandbox secret not available") - - client = SandboxClient(sandbox_url, self.sandbox_secret) + client = self._get_client() try: response = client.start_process(cmd, cwd, env) # Check for process ID - if it exists, the process was launched successfully @@ -597,26 +623,16 @@ def kill_process(self, process_id: str) -> None: Example: >>> sandbox.kill_process("550e8400-e29b-41d4-a716-446655440000") """ - from .executor_client import SandboxClient - - sandbox_url = self._get_sandbox_url() - if not sandbox_url: - raise SandboxError("Unable to get sandbox URL") - if not self.sandbox_secret: - raise SandboxError("Sandbox secret not available") - - client = SandboxClient(sandbox_url, self.sandbox_secret) + client = self._get_client() try: response = client.kill_process(process_id) - if not response.get("success", False): - error_msg = response.get("error", "Unknown error") - raise SandboxError(f"Failed to kill process {process_id}: {error_msg}") + self._check_response_error(response, f"kill process {process_id}") except Exception as e: if isinstance(e, SandboxError): raise raise SandboxError(f"Failed to kill process {process_id}: {str(e)}") from e - def list_processes(self) -> List[Dict[str, Any]]: + def 
list_processes(self) -> List[ProcessInfo]: """ List all background processes. @@ -625,7 +641,7 @@ def list_processes(self) -> List[Dict[str, Any]]: (which remain in memory until server restart). Returns: - List[Dict[str, Any]]: List of process dictionaries, each containing: + List[ProcessInfo]: List of process dictionaries, each containing: - id: Process ID (UUID string) - cmd: The command that was executed - status: Process status (e.g., "running", "completed") @@ -642,15 +658,7 @@ def list_processes(self) -> List[Dict[str, Any]]: >>> for process in processes: ... print(f"{process['id']}: {process['cmd']} - {process['status']}") """ - from .executor_client import SandboxClient - - sandbox_url = self._get_sandbox_url() - if not sandbox_url: - raise SandboxError("Unable to get sandbox URL") - if not self.sandbox_secret: - raise SandboxError("Sandbox secret not available") - - client = SandboxClient(sandbox_url, self.sandbox_secret) + client = self._get_client() try: response = client.list_processes() return response.get("processes", []) @@ -691,6 +699,20 @@ def kill_all_processes(self) -> int: pass return killed_count + def __enter__(self) -> "Sandbox": + """Context manager entry - returns self.""" + return self + + def __exit__(self, exc_type, exc_val, exc_tb) -> None: + """Context manager exit - automatically deletes the sandbox.""" + try: + # Clean up client if it exists + if self._client is not None: + self._client.close() + self.delete() + except Exception as e: + logger.warning(f"Error during sandbox cleanup: {e}") + class AsyncSandbox(Sandbox): """ @@ -698,6 +720,20 @@ class AsyncSandbox(Sandbox): Inherits from Sandbox and provides async wrappers for all operations. """ + def _run_sync(self, method, *args, **kwargs): + """ + Helper method to run a synchronous method in an executor. 
+ + Args: + method: The sync method to run (from super()) + *args: Positional arguments for the method + **kwargs: Keyword arguments for the method + + Returns: + Result of the synchronous method call + """ + return run_sync_in_executor(method, *args, **kwargs) + @classmethod async def create( cls, @@ -779,7 +815,11 @@ async def create( return sandbox - async def wait_ready(self, timeout: int = 60, poll_interval: float = 2.0) -> bool: + async def wait_ready( + self, + timeout: int = DEFAULT_INSTANCE_WAIT_TIMEOUT, + poll_interval: float = DEFAULT_POLL_INTERVAL, + ) -> bool: """ Wait for sandbox to become ready with proper async polling. @@ -804,7 +844,9 @@ async def wait_ready(self, timeout: int = 60, poll_interval: float = 2.0) -> boo return False async def wait_tcp_proxy_ready( - self, timeout: int = 60, poll_interval: float = 2.0 + self, + timeout: int = DEFAULT_INSTANCE_WAIT_TIMEOUT, + poll_interval: float = DEFAULT_POLL_INTERVAL, ) -> bool: """ Wait for TCP proxy to become ready and available asynchronously. 
@@ -836,19 +878,15 @@ async def wait_tcp_proxy_ready( async def delete(self) -> None: """Delete the sandbox instance asynchronously.""" - loop = asyncio.get_running_loop() - await loop.run_in_executor(None, super().delete) + await self._run_sync(super().delete) async def status(self) -> str: """Get current sandbox status asynchronously""" - loop = asyncio.get_running_loop() - status_value = await loop.run_in_executor(None, super().status) - return status_value + return await self._run_sync(super().status) async def is_healthy(self) -> bool: """Check if sandbox is healthy and ready for operations asynchronously""" - loop = asyncio.get_running_loop() - return await loop.run_in_executor(None, super().is_healthy) + return await self._run_sync(super().is_healthy) @property def exec(self) -> "AsyncSandboxExecutor": @@ -866,44 +904,40 @@ def filesystem(self) -> "AsyncSandboxFilesystem": async def expose_port(self, port: int) -> ExposedPort: """Expose a port to external connections via TCP proxy asynchronously.""" - import asyncio - - loop = asyncio.get_running_loop() - return await loop.run_in_executor(None, super().expose_port, port) + return await self._run_sync(super().expose_port, port) async def unexpose_port(self) -> None: """Unexpose a port from external connections asynchronously.""" - import asyncio - - loop = asyncio.get_running_loop() - await loop.run_in_executor(None, super().unexpose_port) + await self._run_sync(super().unexpose_port) async def launch_process( self, cmd: str, cwd: Optional[str] = None, env: Optional[Dict[str, str]] = None ) -> str: """Launch a background process in the sandbox asynchronously.""" - import asyncio - - loop = asyncio.get_running_loop() - return await loop.run_in_executor(None, super().launch_process, cmd, cwd, env) + return await self._run_sync(super().launch_process, cmd, cwd, env) async def kill_process(self, process_id: str) -> None: """Kill a background process by its ID asynchronously.""" - import asyncio + await 
self._run_sync(super().kill_process, process_id) - loop = asyncio.get_running_loop() - await loop.run_in_executor(None, super().kill_process, process_id) - - async def list_processes(self) -> List[Dict[str, Any]]: + async def list_processes(self) -> List[ProcessInfo]: """List all background processes asynchronously.""" - import asyncio - - loop = asyncio.get_running_loop() - return await loop.run_in_executor(None, super().list_processes) + return await self._run_sync(super().list_processes) async def kill_all_processes(self) -> int: """Kill all running background processes asynchronously.""" - import asyncio + return await self._run_sync(super().kill_all_processes) - loop = asyncio.get_running_loop() - return await loop.run_in_executor(None, super().kill_all_processes) + async def __aenter__(self) -> "AsyncSandbox": + """Async context manager entry - returns self.""" + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb) -> None: + """Async context manager exit - automatically deletes the sandbox.""" + try: + # Clean up client if it exists + if self._client is not None: + self._client.close() + await self.delete() + except Exception as e: + logger.warning(f"Error during sandbox cleanup: {e}") diff --git a/koyeb/sandbox/utils.py b/koyeb/sandbox/utils.py index c2bef6e7..f36a3be5 100644 --- a/koyeb/sandbox/utils.py +++ b/koyeb/sandbox/utils.py @@ -4,8 +4,11 @@ Utility functions for Koyeb Sandbox """ +import asyncio +import logging import os -from typing import Dict, List, Literal, Optional, TypedDict, Union +import shlex +from typing import Any, Callable, Dict, List, Literal, Optional, TypedDict, Union from koyeb.api import ApiClient, Configuration from koyeb.api.api import AppsApi, CatalogInstancesApi, InstancesApi, ServicesApi @@ -26,7 +29,23 @@ from koyeb.api.models.instance_status import InstanceStatus from koyeb.api.models.proxy_port_protocol import ProxyPortProtocol -from .executor_client import SandboxClient +# Setup logging +logger = 
logging.getLogger(__name__) + +# Constants +MIN_PORT = 1 +MAX_PORT = 65535 +DEFAULT_INSTANCE_WAIT_TIMEOUT = 60 # seconds +DEFAULT_POLL_INTERVAL = 2.0 # seconds +DEFAULT_COMMAND_TIMEOUT = 30 # seconds +DEFAULT_HTTP_TIMEOUT = 30 # seconds for HTTP requests + +# Error messages +ERROR_MESSAGES = { + "NO_SUCH_FILE": ["No such file", "not found", "No such file or directory"], + "FILE_EXISTS": ["exists", "already exists"], + "DIR_NOT_EMPTY": ["not empty", "Directory not empty"], +} # Type definitions for idle timeout IdleTimeoutSeconds = int @@ -442,7 +461,11 @@ def get_sandbox_status( _, _, instances_api, _ = get_api_client(api_token) instance_response = instances_api.get_instance(instance_id) return instance_response.instance.status - except (NotFoundException, ApiException, Exception): + except (NotFoundException, ApiException) as e: + logger.debug(f"Failed to get sandbox status: {e}") + return InstanceStatus.ERROR + except Exception as e: + logger.warning(f"Unexpected error getting sandbox status: {e}") return InstanceStatus.ERROR @@ -487,18 +510,129 @@ def is_sandbox_healthy( # Check executor health try: + from .executor_client import SandboxClient + client = SandboxClient(sandbox_url, sandbox_secret) health_response = client.health() # Check if health response indicates the server is healthy # The exact response format may vary, but typically has a "status" field if isinstance(health_response, dict): status = health_response.get("status", "").lower() - return status in ["ok", "healthy", "ready"] + is_healthy = status in ["ok", "healthy", "ready"] + if not is_healthy: + logger.debug(f"Sandbox executor health check returned status: {status}") + return is_healthy return True # If we got a response, consider it healthy - except Exception: + except Exception as e: # If we can't reach the executor API, consider it unhealthy + logger.debug(f"Sandbox executor health check failed: {e}") + return False + + +def escape_shell_arg(arg: str) -> str: + """ + Escape a shell 
argument for safe use in shell commands. + + Args: + arg: The argument to escape + + Returns: + Properly escaped shell argument + """ + return shlex.quote(arg) + + +def validate_port(port: int) -> None: + """ + Validate that a port number is in the valid range. + + Args: + port: Port number to validate + + Raises: + ValueError: If port is not in valid range [1, 65535] + """ + if not isinstance(port, int) or port < MIN_PORT or port > MAX_PORT: + raise ValueError( + f"Port must be an integer between {MIN_PORT} and {MAX_PORT}, got {port}" + ) + + +def check_error_message(error_msg: str, error_type: str) -> bool: + """ + Check if an error message matches a specific error type. + Uses case-insensitive matching against known error patterns. + + Args: + error_msg: The error message to check + error_type: The type of error to check for (key in ERROR_MESSAGES) + + Returns: + True if error message matches the error type + """ + if error_type not in ERROR_MESSAGES: return False + error_msg_lower = error_msg.lower() + patterns = ERROR_MESSAGES[error_type] + return any(pattern.lower() in error_msg_lower for pattern in patterns) + + +async def run_sync_in_executor( + method: Callable[..., Any], *args: Any, **kwargs: Any +) -> Any: + """ + Run a synchronous method in an async executor. + + Helper function to wrap synchronous methods for async execution. + Used by AsyncSandbox and AsyncSandboxFilesystem to wrap sync parent methods. + + Args: + method: The synchronous method to run + *args: Positional arguments for the method + **kwargs: Keyword arguments for the method + + Returns: + Result of the synchronous method call + """ + loop = asyncio.get_running_loop() + return loop.run_in_executor(None, lambda: method(*args, **kwargs)) + + +def create_sandbox_client( + sandbox_url: Optional[str], + sandbox_secret: Optional[str], + existing_client: Optional[Any] = None, +) -> Any: + """ + Create or return existing SandboxClient instance with validation. 
+ + Helper function to create SandboxClient instances with consistent validation. + Used by Sandbox, SandboxExecutor, and SandboxFilesystem to avoid duplication. + + Args: + sandbox_url: The sandbox URL (from _get_sandbox_url() or sandbox._get_sandbox_url()) + sandbox_secret: The sandbox secret + existing_client: Existing client instance to return if not None + + Returns: + SandboxClient: Configured client instance + + Raises: + SandboxError: If sandbox URL or secret is not available + """ + if existing_client is not None: + return existing_client + + if not sandbox_url: + raise SandboxError("Unable to get sandbox URL") + if not sandbox_secret: + raise SandboxError("Sandbox secret not available") + + from .executor_client import SandboxClient + + return SandboxClient(sandbox_url, sandbox_secret) + class SandboxError(Exception): """Base exception for sandbox operations""" From 6bfaf100c2d594e2a3346f2e900a8e73d53d1ecb Mon Sep 17 00:00:00 2001 From: Edouard Bonlieu Date: Wed, 5 Nov 2025 08:47:40 +0100 Subject: [PATCH 34/47] update background processes examples --- examples/13_background_processes.py | 3 +++ examples/13_background_processes_async.py | 3 +++ examples/14_expose_port.py | 4 +++- examples/14_expose_port_async.py | 2 +- koyeb/sandbox/executor_client.py | 4 ++-- koyeb/sandbox/sandbox.py | 6 +++--- 6 files changed, 15 insertions(+), 7 deletions(-) diff --git a/examples/13_background_processes.py b/examples/13_background_processes.py index 37627298..b3094c6d 100755 --- a/examples/13_background_processes.py +++ b/examples/13_background_processes.py @@ -44,6 +44,7 @@ def main(): processes = sandbox.list_processes() for process in processes: print(f" ID: {process.get('id', 'N/A')}") + print(f" Command: {process.get('command', 'N/A')}") print(f" Status: {process.get('status', 'N/A')}") if process.get("pid"): print(f" PID: {process.get('pid')}") @@ -62,6 +63,7 @@ def main(): processes = sandbox.list_processes() for process in processes: print(f" ID: 
{process.get('id', 'N/A')}") + print(f" Command: {process.get('command', 'N/A')}") print(f" Status: {process.get('status', 'N/A')}") print() @@ -83,6 +85,7 @@ def main(): processes = sandbox.list_processes() for process in processes: print(f" ID: {process.get('id', 'N/A')}") + print(f" Command: {process.get('command', 'N/A')}") print(f" Status: {process.get('status', 'N/A')}") print() diff --git a/examples/13_background_processes_async.py b/examples/13_background_processes_async.py index 82043d34..5cc3b2d8 100755 --- a/examples/13_background_processes_async.py +++ b/examples/13_background_processes_async.py @@ -44,6 +44,7 @@ async def main(): processes = await sandbox.list_processes() for process in processes: print(f" ID: {process.get('id', 'N/A')}") + print(f" Command: {process.get('command', 'N/A')}") print(f" Status: {process.get('status', 'N/A')}") if process.get("pid"): print(f" PID: {process.get('pid')}") @@ -62,6 +63,7 @@ async def main(): processes = await sandbox.list_processes() for process in processes: print(f" ID: {process.get('id', 'N/A')}") + print(f" Command: {process.get('command', 'N/A')}") print(f" Status: {process.get('status', 'N/A')}") print() @@ -83,6 +85,7 @@ async def main(): processes = await sandbox.list_processes() for process in processes: print(f" ID: {process.get('id', 'N/A')}") + print(f" Command: {process.get('command', 'N/A')}") print(f" Status: {process.get('status', 'N/A')}") print() diff --git a/examples/14_expose_port.py b/examples/14_expose_port.py index 330110e2..448a6d38 100755 --- a/examples/14_expose_port.py +++ b/examples/14_expose_port.py @@ -69,7 +69,9 @@ def main(): processes = sandbox.list_processes() for process in processes: if process.get("status") == "running": - print(f" {process.get('id')}: {process.get('status')}") + print( + f" {process.get('id')}: {process.get('command')} - {process.get('status')}" + ) # Switch to a different port (e.g., 8081) print("\nSwitching to port 8081...") diff --git 
a/examples/14_expose_port_async.py b/examples/14_expose_port_async.py index 1abf79c7..929a70d1 100755 --- a/examples/14_expose_port_async.py +++ b/examples/14_expose_port_async.py @@ -73,7 +73,7 @@ async def main(): for process in processes: if process.get("status") == "running": print( - f" {process.get('id')}: {process.get('cmd')} - {process.get('status')}" + f" {process.get('id')}: {process.get('command')} - {process.get('status')}" ) # Switch to a different port (e.g., 8081) diff --git a/koyeb/sandbox/executor_client.py b/koyeb/sandbox/executor_client.py index feb71eab..95b6c0a5 100644 --- a/koyeb/sandbox/executor_client.py +++ b/koyeb/sandbox/executor_client.py @@ -473,7 +473,7 @@ def list_processes(self) -> Dict[str, Any]: Dict with a list of processes: - processes: List of process objects, each containing: - id: Process ID (UUID string) - - cmd: The command that was executed + - command: The command that was executed - status: Process status (e.g., "running", "completed") - pid: OS process ID (if running) - exit_code: Exit code (if completed) @@ -484,7 +484,7 @@ def list_processes(self) -> Dict[str, Any]: >>> client = SandboxClient("http://localhost:8080", "secret") >>> result = client.list_processes() >>> for process in result.get("processes", []): - ... print(f"{process['id']}: {process['cmd']} - {process['status']}") + ... 
print(f"{process['id']}: {process['command']} - {process['status']}") """ response = self._request_with_retry( "GET", f"{self.base_url}/list_processes", headers=self.headers diff --git a/koyeb/sandbox/sandbox.py b/koyeb/sandbox/sandbox.py index 5716cbcc..96a3b560 100644 --- a/koyeb/sandbox/sandbox.py +++ b/koyeb/sandbox/sandbox.py @@ -45,7 +45,7 @@ class ProcessInfo(TypedDict, total=False): """Type definition for process information returned by list_processes.""" id: str # Process ID (UUID string) - cmd: str # The command that was executed + command: str # The command that was executed status: str # Process status (e.g., "running", "completed") pid: int # OS process ID (if running) exit_code: int # Exit code (if completed) @@ -643,7 +643,7 @@ def list_processes(self) -> List[ProcessInfo]: Returns: List[ProcessInfo]: List of process dictionaries, each containing: - id: Process ID (UUID string) - - cmd: The command that was executed + - command: The command that was executed - status: Process status (e.g., "running", "completed") - pid: OS process ID (if running) - exit_code: Exit code (if completed) @@ -656,7 +656,7 @@ def list_processes(self) -> List[ProcessInfo]: Example: >>> processes = sandbox.list_processes() >>> for process in processes: - ... print(f"{process['id']}: {process['cmd']} - {process['status']}") + ... 
print(f"{process['id']}: {process['command']} - {process['status']}") """ client = self._get_client() try: From de8765e4f141c030b7143ff2f342bccc639d74ae Mon Sep 17 00:00:00 2001 From: Edouard Bonlieu Date: Wed, 5 Nov 2025 11:04:05 +0100 Subject: [PATCH 35/47] fix async convert process info to dataclass --- examples/13_background_processes.py | 22 +++++------ examples/13_background_processes_async.py | 22 +++++------ examples/14_expose_port.py | 6 +-- examples/14_expose_port_async.py | 6 +-- koyeb/sandbox/filesystem.py | 4 +- koyeb/sandbox/sandbox.py | 45 ++++++++++++++++------- koyeb/sandbox/utils.py | 2 +- 7 files changed, 60 insertions(+), 47 deletions(-) diff --git a/examples/13_background_processes.py b/examples/13_background_processes.py index b3094c6d..a373ee36 100755 --- a/examples/13_background_processes.py +++ b/examples/13_background_processes.py @@ -43,11 +43,11 @@ def main(): print("\nListing all processes:") processes = sandbox.list_processes() for process in processes: - print(f" ID: {process.get('id', 'N/A')}") - print(f" Command: {process.get('command', 'N/A')}") - print(f" Status: {process.get('status', 'N/A')}") - if process.get("pid"): - print(f" PID: {process.get('pid')}") + print(f" ID: {process.id}") + print(f" Command: {process.command}") + print(f" Status: {process.status}") + if process.pid: + print(f" PID: {process.pid}") print() # Kill a specific process @@ -62,9 +62,9 @@ def main(): print("\nListing processes after kill:") processes = sandbox.list_processes() for process in processes: - print(f" ID: {process.get('id', 'N/A')}") - print(f" Command: {process.get('command', 'N/A')}") - print(f" Status: {process.get('status', 'N/A')}") + print(f" ID: {process.id}") + print(f" Command: {process.command}") + print(f" Status: {process.status}") print() # Launch a few more processes @@ -84,9 +84,9 @@ def main(): print("\nFinal process list:") processes = sandbox.list_processes() for process in processes: - print(f" ID: {process.get('id', 
'N/A')}") - print(f" Command: {process.get('command', 'N/A')}") - print(f" Status: {process.get('status', 'N/A')}") + print(f" ID: {process.id}") + print(f" Command: {process.command}") + print(f" Status: {process.status}") print() except Exception as e: diff --git a/examples/13_background_processes_async.py b/examples/13_background_processes_async.py index 5cc3b2d8..169990de 100755 --- a/examples/13_background_processes_async.py +++ b/examples/13_background_processes_async.py @@ -43,11 +43,11 @@ async def main(): print("\nListing all processes:") processes = await sandbox.list_processes() for process in processes: - print(f" ID: {process.get('id', 'N/A')}") - print(f" Command: {process.get('command', 'N/A')}") - print(f" Status: {process.get('status', 'N/A')}") - if process.get("pid"): - print(f" PID: {process.get('pid')}") + print(f" ID: {process.id}") + print(f" Command: {process.command}") + print(f" Status: {process.status}") + if process.pid: + print(f" PID: {process.pid}") print() # Kill a specific process @@ -62,9 +62,9 @@ async def main(): print("\nListing processes after kill:") processes = await sandbox.list_processes() for process in processes: - print(f" ID: {process.get('id', 'N/A')}") - print(f" Command: {process.get('command', 'N/A')}") - print(f" Status: {process.get('status', 'N/A')}") + print(f" ID: {process.id}") + print(f" Command: {process.command}") + print(f" Status: {process.status}") print() # Launch a few more processes @@ -84,9 +84,9 @@ async def main(): print("\nFinal process list:") processes = await sandbox.list_processes() for process in processes: - print(f" ID: {process.get('id', 'N/A')}") - print(f" Command: {process.get('command', 'N/A')}") - print(f" Status: {process.get('status', 'N/A')}") + print(f" ID: {process.id}") + print(f" Command: {process.command}") + print(f" Status: {process.status}") print() except Exception as e: diff --git a/examples/14_expose_port.py b/examples/14_expose_port.py index 448a6d38..af91ec6a 100755 
--- a/examples/14_expose_port.py +++ b/examples/14_expose_port.py @@ -68,10 +68,8 @@ def main(): print("\nRunning processes:") processes = sandbox.list_processes() for process in processes: - if process.get("status") == "running": - print( - f" {process.get('id')}: {process.get('command')} - {process.get('status')}" - ) + if process.status == "running": + print(f" {process.id}: {process.command} - {process.status}") # Switch to a different port (e.g., 8081) print("\nSwitching to port 8081...") diff --git a/examples/14_expose_port_async.py b/examples/14_expose_port_async.py index 929a70d1..50ab62f6 100755 --- a/examples/14_expose_port_async.py +++ b/examples/14_expose_port_async.py @@ -71,10 +71,8 @@ async def main(): print("\nRunning processes:") processes = await sandbox.list_processes() for process in processes: - if process.get("status") == "running": - print( - f" {process.get('id')}: {process.get('command')} - {process.get('status')}" - ) + if process.status == "running": + print(f" {process.id}: {process.command} - {process.status}") # Switch to a different port (e.g., 8081) print("\nSwitching to port 8081...") diff --git a/koyeb/sandbox/filesystem.py b/koyeb/sandbox/filesystem.py index 873d429a..3804a802 100644 --- a/koyeb/sandbox/filesystem.py +++ b/koyeb/sandbox/filesystem.py @@ -437,7 +437,7 @@ class AsyncSandboxFilesystem(SandboxFilesystem): Inherits from SandboxFilesystem and provides async methods. """ - def _run_sync(self, method, *args, **kwargs): + async def _run_sync(self, method, *args, **kwargs): """ Helper method to run a synchronous method in an executor. 
@@ -449,7 +449,7 @@ def _run_sync(self, method, *args, **kwargs): Returns: Result of the synchronous method call """ - return run_sync_in_executor(method, *args, **kwargs) + return await run_sync_in_executor(method, *args, **kwargs) async def write_file( self, path: str, content: Union[str, bytes], encoding: str = "utf-8" diff --git a/koyeb/sandbox/sandbox.py b/koyeb/sandbox/sandbox.py index 96a3b560..12564c68 100644 --- a/koyeb/sandbox/sandbox.py +++ b/koyeb/sandbox/sandbox.py @@ -11,7 +11,7 @@ import secrets import time from dataclasses import dataclass -from typing import TYPE_CHECKING, Dict, List, Optional, TypedDict +from typing import TYPE_CHECKING, Dict, List, Optional from koyeb.api.api.deployments_api import DeploymentsApi from koyeb.api.models.create_app import CreateApp @@ -41,16 +41,19 @@ from .filesystem import AsyncSandboxFilesystem, SandboxFilesystem -class ProcessInfo(TypedDict, total=False): +@dataclass +class ProcessInfo: """Type definition for process information returned by list_processes.""" id: str # Process ID (UUID string) command: str # The command that was executed status: str # Process status (e.g., "running", "completed") - pid: int # OS process ID (if running) - exit_code: int # Exit code (if completed) - started_at: str # ISO 8601 timestamp when process started - completed_at: str # ISO 8601 timestamp when process completed (if applicable) + pid: Optional[int] = None # OS process ID (if running) + exit_code: Optional[int] = None # Exit code (if completed) + started_at: Optional[str] = None # ISO 8601 timestamp when process started + completed_at: Optional[str] = ( + None # ISO 8601 timestamp when process completed (if applicable) + ) @dataclass @@ -641,7 +644,7 @@ def list_processes(self) -> List[ProcessInfo]: (which remain in memory until server restart). 
Returns: - List[ProcessInfo]: List of process dictionaries, each containing: + List[ProcessInfo]: List of process objects, each containing: - id: Process ID (UUID string) - command: The command that was executed - status: Process status (e.g., "running", "completed") @@ -656,12 +659,13 @@ def list_processes(self) -> List[ProcessInfo]: Example: >>> processes = sandbox.list_processes() >>> for process in processes: - ... print(f"{process['id']}: {process['command']} - {process['status']}") + ... print(f"{process.id}: {process.command} - {process.status}") """ client = self._get_client() try: response = client.list_processes() - return response.get("processes", []) + processes_data = response.get("processes", []) + return [ProcessInfo(**process) for process in processes_data] except Exception as e: if isinstance(e, SandboxError): raise @@ -687,8 +691,8 @@ def kill_all_processes(self) -> int: processes = self.list_processes() killed_count = 0 for process in processes: - process_id = process.get("id") - status = process.get("status", "") + process_id = process.id + status = process.status # Only kill running processes if process_id and status == "running": try: @@ -720,7 +724,7 @@ class AsyncSandbox(Sandbox): Inherits from Sandbox and provides async wrappers for all operations. """ - def _run_sync(self, method, *args, **kwargs): + async def _run_sync(self, method, *args, **kwargs): """ Helper method to run a synchronous method in an executor. 
@@ -732,7 +736,7 @@ def _run_sync(self, method, *args, **kwargs): Returns: Result of the synchronous method call """ - return run_sync_in_executor(method, *args, **kwargs) + return await run_sync_in_executor(method, *args, **kwargs) @classmethod async def create( @@ -926,7 +930,20 @@ async def list_processes(self) -> List[ProcessInfo]: async def kill_all_processes(self) -> int: """Kill all running background processes asynchronously.""" - return await self._run_sync(super().kill_all_processes) + processes = await self.list_processes() + killed_count = 0 + for process in processes: + process_id = process.id + status = process.status + # Only kill running processes + if process_id and status == "running": + try: + await self.kill_process(process_id) + killed_count += 1 + except SandboxError: + # Continue killing other processes even if one fails + pass + return killed_count async def __aenter__(self) -> "AsyncSandbox": """Async context manager entry - returns self.""" diff --git a/koyeb/sandbox/utils.py b/koyeb/sandbox/utils.py index f36a3be5..12daed45 100644 --- a/koyeb/sandbox/utils.py +++ b/koyeb/sandbox/utils.py @@ -596,7 +596,7 @@ async def run_sync_in_executor( Result of the synchronous method call """ loop = asyncio.get_running_loop() - return loop.run_in_executor(None, lambda: method(*args, **kwargs)) + return await loop.run_in_executor(None, lambda: method(*args, **kwargs)) def create_sandbox_client( From 24af1119f1f59edbeff8abb820bd02b5e0294a9d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=81ukasz=20Ole=C5=9B?= Date: Wed, 5 Nov 2025 11:13:45 +0100 Subject: [PATCH 36/47] Fix deps --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 95189c3d..b523edd0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,6 +14,7 @@ dependencies = [ "pydantic (>=2)", "typing-extensions (>=4.7.1)", "websockets>=15.0.1", + "requests>=2.32.5", ] [project.urls] From 932303534cdef42f8521ed0bb0761ad333e1d2f5 Mon Sep 17 00:00:00 
2001 From: Edouard Bonlieu Date: Wed, 5 Nov 2025 11:29:14 +0100 Subject: [PATCH 37/47] add async_wrapper decorator --- koyeb/sandbox/filesystem.py | 43 +++++++++++++++++++++++++------------ koyeb/sandbox/sandbox.py | 25 ++++++++++++++------- koyeb/sandbox/utils.py | 38 ++++++++++++++++++++++++++++++++ 3 files changed, 84 insertions(+), 22 deletions(-) diff --git a/koyeb/sandbox/filesystem.py b/koyeb/sandbox/filesystem.py index 3804a802..ae072dae 100644 --- a/koyeb/sandbox/filesystem.py +++ b/koyeb/sandbox/filesystem.py @@ -14,6 +14,7 @@ from .executor_client import SandboxClient from .utils import ( SandboxError, + async_wrapper, check_error_message, create_sandbox_client, escape_shell_arg, @@ -451,6 +452,7 @@ async def _run_sync(self, method, *args, **kwargs): """ return await run_sync_in_executor(method, *args, **kwargs) + @async_wrapper("write_file") async def write_file( self, path: str, content: Union[str, bytes], encoding: str = "utf-8" ) -> None: @@ -462,8 +464,9 @@ async def write_file( content: Content to write (string or bytes) encoding: File encoding (default: "utf-8"). Use "base64" for binary data. """ - await self._run_sync(super().write_file, path, content, encoding) + pass + @async_wrapper("read_file") async def read_file(self, path: str, encoding: str = "utf-8") -> FileInfo: """ Read a file from the sandbox asynchronously. @@ -475,8 +478,9 @@ async def read_file(self, path: str, encoding: str = "utf-8") -> FileInfo: Returns: FileInfo: Object with content and encoding """ - return await self._run_sync(super().read_file, path, encoding) + pass + @async_wrapper("mkdir") async def mkdir(self, path: str, recursive: bool = False) -> None: """ Create a directory asynchronously. 
@@ -485,8 +489,9 @@ async def mkdir(self, path: str, recursive: bool = False) -> None: path: Absolute path to the directory recursive: Create parent directories if needed (default: False, not used - API always creates parents) """ - await self._run_sync(super().mkdir, path, recursive) + pass + @async_wrapper("list_dir") async def list_dir(self, path: str = ".") -> List[str]: """ List contents of a directory asynchronously. @@ -497,8 +502,9 @@ async def list_dir(self, path: str = ".") -> List[str]: Returns: List[str]: Names of files and directories within the specified path. """ - return await self._run_sync(super().list_dir, path) + pass + @async_wrapper("delete_file") async def delete_file(self, path: str) -> None: """ Delete a file asynchronously. @@ -506,8 +512,9 @@ async def delete_file(self, path: str) -> None: Args: path: Absolute path to the file """ - await self._run_sync(super().delete_file, path) + pass + @async_wrapper("delete_dir") async def delete_dir(self, path: str) -> None: """ Delete a directory asynchronously. @@ -515,8 +522,9 @@ async def delete_dir(self, path: str) -> None: Args: path: Absolute path to the directory """ - await self._run_sync(super().delete_dir, path) + pass + @async_wrapper("rename_file") async def rename_file(self, old_path: str, new_path: str) -> None: """ Rename a file asynchronously. @@ -525,8 +533,9 @@ async def rename_file(self, old_path: str, new_path: str) -> None: old_path: Current file path new_path: New file path """ - await self._run_sync(super().rename_file, old_path, new_path) + pass + @async_wrapper("move_file") async def move_file(self, source_path: str, destination_path: str) -> None: """ Move a file to a different directory asynchronously. 
@@ -535,7 +544,7 @@ async def move_file(self, source_path: str, destination_path: str) -> None: source_path: Current file path destination_path: Destination path """ - await self._run_sync(super().move_file, source_path, destination_path) + pass async def write_files(self, files: List[Dict[str, str]]) -> None: """ @@ -550,18 +559,22 @@ async def write_files(self, files: List[Dict[str, str]]) -> None: encoding = file_info.get("encoding", "utf-8") await self.write_file(path, content, encoding) + @async_wrapper("exists") async def exists(self, path: str) -> bool: """Check if file/directory exists asynchronously""" - return await self._run_sync(super().exists, path) + pass + @async_wrapper("is_file") async def is_file(self, path: str) -> bool: """Check if path is a file asynchronously""" - return await self._run_sync(super().is_file, path) + pass + @async_wrapper("is_dir") async def is_dir(self, path: str) -> bool: """Check if path is a directory asynchronously""" - return await self._run_sync(super().is_dir, path) + pass + @async_wrapper("upload_file") async def upload_file( self, local_path: str, remote_path: str, encoding: str = "utf-8" ) -> None: @@ -573,8 +586,9 @@ async def upload_file( remote_path: Destination path in the sandbox encoding: File encoding (default: "utf-8"). Use "base64" for binary files. """ - await self._run_sync(super().upload_file, local_path, remote_path, encoding) + pass + @async_wrapper("download_file") async def download_file( self, remote_path: str, local_path: str, encoding: str = "utf-8" ) -> None: @@ -586,7 +600,7 @@ async def download_file( local_path: Destination path on the local filesystem encoding: File encoding (default: "utf-8"). Use "base64" for binary files. 
""" - await self._run_sync(super().download_file, remote_path, local_path, encoding) + pass async def ls(self, path: str = ".") -> List[str]: """ @@ -600,6 +614,7 @@ async def ls(self, path: str = ".") -> List[str]: """ return await self.list_dir(path) + @async_wrapper("rm") async def rm(self, path: str, recursive: bool = False) -> None: """ Remove file or directory asynchronously. @@ -608,7 +623,7 @@ async def rm(self, path: str, recursive: bool = False) -> None: path: Path to remove recursive: Remove recursively """ - await self._run_sync(super().rm, path, recursive) + pass def open(self, path: str, mode: str = "r") -> AsyncSandboxFileIO: """ diff --git a/koyeb/sandbox/sandbox.py b/koyeb/sandbox/sandbox.py index 12564c68..5dd942f8 100644 --- a/koyeb/sandbox/sandbox.py +++ b/koyeb/sandbox/sandbox.py @@ -23,6 +23,7 @@ IdleTimeout, SandboxError, _is_light_sleep_enabled, + async_wrapper, build_env_vars, create_deployment_definition, create_docker_source, @@ -880,17 +881,20 @@ async def wait_tcp_proxy_ready( return False + @async_wrapper("delete") async def delete(self) -> None: """Delete the sandbox instance asynchronously.""" - await self._run_sync(super().delete) + pass + @async_wrapper("status") async def status(self) -> str: """Get current sandbox status asynchronously""" - return await self._run_sync(super().status) + pass + @async_wrapper("is_healthy") async def is_healthy(self) -> bool: """Check if sandbox is healthy and ready for operations asynchronously""" - return await self._run_sync(super().is_healthy) + pass @property def exec(self) -> "AsyncSandboxExecutor": @@ -906,27 +910,32 @@ def filesystem(self) -> "AsyncSandboxFilesystem": return AsyncSandboxFilesystem(self) + @async_wrapper("expose_port") async def expose_port(self, port: int) -> ExposedPort: """Expose a port to external connections via TCP proxy asynchronously.""" - return await self._run_sync(super().expose_port, port) + pass + @async_wrapper("unexpose_port") async def unexpose_port(self) -> 
None: """Unexpose a port from external connections asynchronously.""" - await self._run_sync(super().unexpose_port) + pass + @async_wrapper("launch_process") async def launch_process( self, cmd: str, cwd: Optional[str] = None, env: Optional[Dict[str, str]] = None ) -> str: """Launch a background process in the sandbox asynchronously.""" - return await self._run_sync(super().launch_process, cmd, cwd, env) + pass + @async_wrapper("kill_process") async def kill_process(self, process_id: str) -> None: """Kill a background process by its ID asynchronously.""" - await self._run_sync(super().kill_process, process_id) + pass + @async_wrapper("list_processes") async def list_processes(self) -> List[ProcessInfo]: """List all background processes asynchronously.""" - return await self._run_sync(super().list_processes) + pass async def kill_all_processes(self) -> int: """Kill all running background processes asynchronously.""" diff --git a/koyeb/sandbox/utils.py b/koyeb/sandbox/utils.py index 12daed45..829255bb 100644 --- a/koyeb/sandbox/utils.py +++ b/koyeb/sandbox/utils.py @@ -599,6 +599,44 @@ async def run_sync_in_executor( return await loop.run_in_executor(None, lambda: method(*args, **kwargs)) +def async_wrapper(method_name: str): + """ + Decorator to automatically create async wrapper for sync methods. + + This decorator creates an async method that wraps a sync method from the parent class. + The sync method is called via super() and executed in an executor. 
+ + Args: + method_name: Name of the sync method to wrap (from parent class) + + Usage: + @async_wrapper("delete") + async def delete(self) -> None: + \"\"\"Delete the sandbox instance asynchronously.\"\"\" + pass # Implementation is handled by decorator + """ + + def decorator(func): + async def wrapper(self, *args, **kwargs): + # Get the parent class from MRO (Method Resolution Order) + # __mro__[0] is the current class, __mro__[1] is the parent + parent_class = self.__class__.__mro__[1] + # Get the unbound method from parent class + sync_method = getattr(parent_class, method_name) + # Bind it to self (equivalent to super().method_name) + bound_method = sync_method.__get__(self, parent_class) + return await self._run_sync(bound_method, *args, **kwargs) + + # Preserve function metadata + wrapper.__name__ = func.__name__ + wrapper.__qualname__ = func.__qualname__ + wrapper.__doc__ = func.__doc__ or f"{method_name} (async version)" + wrapper.__annotations__ = func.__annotations__ + return wrapper + + return decorator + + def create_sandbox_client( sandbox_url: Optional[str], sandbox_secret: Optional[str], From 61e66ed40ac2ce106bcde8775a9799b19bf185d7 Mon Sep 17 00:00:00 2001 From: Bastien Chatelard Date: Wed, 5 Nov 2025 12:55:25 +0100 Subject: [PATCH 38/47] Update uv.lock --- uv.lock | 2 ++ 1 file changed, 2 insertions(+) diff --git a/uv.lock b/uv.lock index c1c513ac..2b1c4dff 100644 --- a/uv.lock +++ b/uv.lock @@ -381,6 +381,7 @@ source = { editable = "." 
} dependencies = [ { name = "pydantic" }, { name = "python-dateutil" }, + { name = "requests" }, { name = "typing-extensions" }, { name = "urllib3" }, { name = "websockets" }, @@ -399,6 +400,7 @@ dev = [ requires-dist = [ { name = "pydantic", specifier = ">=2" }, { name = "python-dateutil", specifier = ">=2.8.2" }, + { name = "requests", specifier = ">=2.32.5" }, { name = "typing-extensions", specifier = ">=4.7.1" }, { name = "urllib3", specifier = ">=2.1.0,<3.0.0" }, { name = "websockets", specifier = ">=15.0.1" }, From 532b799c2095b1ecc7998a694bd2cb35cdbe7909 Mon Sep 17 00:00:00 2001 From: Edouard Bonlieu Date: Wed, 5 Nov 2025 13:21:29 +0100 Subject: [PATCH 39/47] add sandbox.get_from_id with examples --- examples/15_get_sandbox.py | 74 ++++++++++++++++ examples/15_get_sandbox_async.py | 77 ++++++++++++++++ examples/README.md | 1 + koyeb/sandbox/sandbox.py | 146 ++++++++++++++++++++++++++++++- 4 files changed, 297 insertions(+), 1 deletion(-) create mode 100644 examples/15_get_sandbox.py create mode 100644 examples/15_get_sandbox_async.py diff --git a/examples/15_get_sandbox.py b/examples/15_get_sandbox.py new file mode 100644 index 00000000..25b30b97 --- /dev/null +++ b/examples/15_get_sandbox.py @@ -0,0 +1,74 @@ +#!/usr/bin/env python3 +"""Create a sandbox and then retrieve it by service ID""" + +import os + +from koyeb import Sandbox + + +def main(): + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + print("Error: KOYEB_API_TOKEN not set") + return + + original_sandbox = None + retrieved_sandbox = None + + try: + # Step 1: Create a new sandbox + print("Creating a new sandbox...") + original_sandbox = Sandbox.create( + image="koyeb/sandbox", + name="example-sandbox", + wait_ready=True, + api_token=api_token, + ) + + print(f"✓ Created sandbox: {original_sandbox.name}") + print(f" Service ID: {original_sandbox.service_id}") + print(f" App ID: {original_sandbox.app_id}") + print(f" Instance ID: {original_sandbox.instance_id}") + + # Execute a command 
with the original sandbox + result = original_sandbox.exec("echo 'Hello from original sandbox!'") + print(f" Original sandbox output: {result.stdout.strip()}") + + # Step 2: Retrieve the same sandbox using its service ID + print("\nRetrieving sandbox by service ID...") + retrieved_sandbox = Sandbox.get_from_id( + id=original_sandbox.id, + api_token=api_token, + ) + + print(f"✓ Retrieved sandbox: {retrieved_sandbox.name}") + print(f" Service ID: {retrieved_sandbox.service_id}") + print(f" App ID: {retrieved_sandbox.app_id}") + print(f" Instance ID: {retrieved_sandbox.instance_id}") + + # Verify it's the same sandbox + assert original_sandbox.id == retrieved_sandbox.id, "Sandbox IDs should match!" + print(" ✓ Confirmed: Same sandbox retrieved") + + # Check status + status = retrieved_sandbox.status() + is_healthy = retrieved_sandbox.is_healthy() + print(f" Status: {status}, Healthy: {is_healthy}") + + # Execute a command with the retrieved sandbox + if is_healthy: + result = retrieved_sandbox.exec("echo 'Hello from retrieved sandbox!'") + print(f" Retrieved sandbox output: {result.stdout.strip()}") + + except Exception as e: + print(f"Error: {e}") + finally: + # Cleanup: delete the sandbox (works from either instance) + if original_sandbox: + print("\nCleaning up...") + original_sandbox.delete() + print("✓ Sandbox deleted") + + +if __name__ == "__main__": + main() diff --git a/examples/15_get_sandbox_async.py b/examples/15_get_sandbox_async.py new file mode 100644 index 00000000..ec3fefec --- /dev/null +++ b/examples/15_get_sandbox_async.py @@ -0,0 +1,77 @@ +#!/usr/bin/env python3 +"""Create a sandbox and then retrieve it by service ID (async)""" + +import asyncio +import os + +from koyeb import AsyncSandbox + + +async def main(): + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + print("Error: KOYEB_API_TOKEN not set") + return + + original_sandbox = None + retrieved_sandbox = None + + try: + # Step 1: Create a new sandbox + print("Creating a new 
sandbox...") + original_sandbox = await AsyncSandbox.create( + image="koyeb/sandbox", + name="example-sandbox", + wait_ready=True, + api_token=api_token, + ) + + print(f"✓ Created sandbox: {original_sandbox.name}") + print(f" Service ID: {original_sandbox.service_id}") + print(f" App ID: {original_sandbox.app_id}") + print(f" Instance ID: {original_sandbox.instance_id}") + + # Execute a command with the original sandbox + result = await original_sandbox.exec("echo 'Hello from original sandbox!'") + print(f" Original sandbox output: {result.stdout.strip()}") + + # Step 2: Retrieve the same sandbox using its service ID + print("\nRetrieving sandbox by service ID...") + retrieved_sandbox = await AsyncSandbox.get_from_id( + id=original_sandbox.id, + api_token=api_token, + ) + + print(f"✓ Retrieved sandbox: {retrieved_sandbox.name}") + print(f" Service ID: {retrieved_sandbox.service_id}") + print(f" App ID: {retrieved_sandbox.app_id}") + print(f" Instance ID: {retrieved_sandbox.instance_id}") + + # Verify it's the same sandbox + assert original_sandbox.id == retrieved_sandbox.id, "Sandbox IDs should match!" 
+ print(" ✓ Confirmed: Same sandbox retrieved") + + # Check status + status = await retrieved_sandbox.status() + is_healthy = await retrieved_sandbox.is_healthy() + print(f" Status: {status}, Healthy: {is_healthy}") + + # Execute a command with the retrieved sandbox + if is_healthy: + result = await retrieved_sandbox.exec( + "echo 'Hello from retrieved sandbox!'" + ) + print(f" Retrieved sandbox output: {result.stdout.strip()}") + + except Exception as e: + print(f"Error: {e}") + finally: + # Cleanup: delete the sandbox (works from either instance) + if original_sandbox: + print("\nCleaning up...") + await original_sandbox.delete() + print("✓ Sandbox deleted") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/README.md b/examples/README.md index ec95e5ca..9dddf90a 100644 --- a/examples/README.md +++ b/examples/README.md @@ -28,6 +28,7 @@ uv run python examples/01_create_sandbox.py - **12_file_manipulation.py** - File manipulation operations - **13_background_processes.py** - Background process management (launch, list, kill) - **14_expose_port.py** - Port exposure via TCP proxy with HTTP verification +- **15_get_sandbox.py** - Create a sandbox and then retrieve it by ID ## Basic Usage diff --git a/koyeb/sandbox/sandbox.py b/koyeb/sandbox/sandbox.py index 5dd942f8..1666c592 100644 --- a/koyeb/sandbox/sandbox.py +++ b/koyeb/sandbox/sandbox.py @@ -14,6 +14,7 @@ from typing import TYPE_CHECKING, Dict, List, Optional from koyeb.api.api.deployments_api import DeploymentsApi +from koyeb.api.exceptions import ApiException, NotFoundException from koyeb.api.models.create_app import CreateApp from koyeb.api.models.create_service import CreateService @@ -95,6 +96,11 @@ def __init__( self._sandbox_url = None self._client = None + @property + def id(self) -> str: + """Get the service ID of the sandbox.""" + return self.service_id + @classmethod def create( cls, @@ -203,7 +209,7 @@ def _create_sync( env_vars = build_env_vars(env) docker_source = 
create_docker_source(image, []) deployment_definition = create_deployment_definition( - name=f"sandbox-service-{name}", + name=name, docker_source=docker_source, env_vars=env_vars, instance_type=instance_type, @@ -258,6 +264,106 @@ def _create_sync( sandbox_secret=sandbox_secret, ) + @classmethod + def get_from_id( + cls, + id: str, + api_token: Optional[str] = None, + ) -> "Sandbox": + """ + Get a sandbox by service ID. + + Args: + id: Service ID of the sandbox + api_token: Koyeb API token (if None, will try to get from KOYEB_API_TOKEN env var) + + Returns: + Sandbox: The Sandbox instance + + Raises: + ValueError: If API token is not provided or id is invalid + SandboxError: If sandbox is not found or retrieval fails + """ + if api_token is None: + api_token = os.getenv("KOYEB_API_TOKEN") + if not api_token: + raise ValueError( + "API token is required. Set KOYEB_API_TOKEN environment variable or pass api_token parameter" + ) + + if not id: + raise ValueError("id is required") + + _, services_api, _, _ = get_api_client(api_token) + deployments_api = DeploymentsApi(services_api.api_client) + + # Get service by ID + try: + service_response = services_api.get_service(id=id) + service = service_response.service + except NotFoundException as e: + raise SandboxError(f"Sandbox not found with id: {id}") from e + except ApiException as e: + raise SandboxError(f"Failed to retrieve sandbox with id: {id}: {e}") from e + + if service is None: + raise SandboxError(f"Sandbox not found with id: {id}") + + sandbox_name = service.name + + # Get deployment to extract sandbox_secret from env vars + deployment_id = service.active_deployment_id or service.latest_deployment_id + sandbox_secret = None + instance_id = None + + if deployment_id: + try: + deployment_response = deployments_api.get_deployment(id=deployment_id) + if ( + deployment_response.deployment + and deployment_response.deployment.definition + and deployment_response.deployment.definition.env + ): + # Find SANDBOX_SECRET 
in env vars + for env_var in deployment_response.deployment.definition.env: + if env_var.key == "SANDBOX_SECRET": + sandbox_secret = env_var.value + break + + # Get instance_id from deployment scaling + try: + scaling_response = deployments_api.get_deployment_scaling( + id=deployment_id + ) + if ( + scaling_response.replicas + and scaling_response.replicas[0].instances + and len(scaling_response.replicas[0].instances) > 0 + ): + instance_id = scaling_response.replicas[0].instances[0].id + except Exception: + logger.debug( + f"Could not get instance for deployment {deployment_id}" + ) + except Exception as e: + logger.debug(f"Could not get deployment {deployment_id}: {e}") + + if not instance_id: + raise SandboxError( + f"Could not find instance for sandbox {id}. " + "The sandbox may not be fully provisioned yet." + ) + + return cls( + sandbox_id=service.id, + app_id=service.app_id, + service_id=service.id, + instance_id=instance_id, + name=sandbox_name, + api_token=api_token, + sandbox_secret=sandbox_secret, + ) + def wait_ready( self, timeout: int = DEFAULT_INSTANCE_WAIT_TIMEOUT, @@ -739,6 +845,44 @@ async def _run_sync(self, method, *args, **kwargs): """ return await run_sync_in_executor(method, *args, **kwargs) + @classmethod + async def get_from_id( + cls, + id: str, + api_token: Optional[str] = None, + ) -> "AsyncSandbox": + """ + Get a sandbox by service ID asynchronously. 
+ + Args: + id: Service ID of the sandbox + api_token: Koyeb API token (if None, will try to get from KOYEB_API_TOKEN env var) + + Returns: + AsyncSandbox: The AsyncSandbox instance + + Raises: + ValueError: If API token is not provided or id is invalid + SandboxError: If sandbox is not found or retrieval fails + """ + sync_sandbox = await run_sync_in_executor( + Sandbox.get_from_id, id=id, api_token=api_token + ) + + # Convert Sandbox instance to AsyncSandbox instance + async_sandbox = cls( + sandbox_id=sync_sandbox.sandbox_id, + app_id=sync_sandbox.app_id, + service_id=sync_sandbox.service_id, + instance_id=sync_sandbox.instance_id, + name=sync_sandbox.name, + api_token=sync_sandbox.api_token, + sandbox_secret=sync_sandbox.sandbox_secret, + ) + async_sandbox._created_at = sync_sandbox._created_at + + return async_sandbox + @classmethod async def create( cls, From 11932d34798e1107d1f9477ce67a09c3d095c0c7 Mon Sep 17 00:00:00 2001 From: Bastien Chatelard Date: Wed, 5 Nov 2025 14:12:07 +0100 Subject: [PATCH 40/47] Update sdk --- docs/api.md | 513 ++++++++++++++---- koyeb/api/api/logs_api.py | 320 +++++++---- koyeb/api/docs/DeploymentDefinitionType.md | 2 + koyeb/api/docs/LogsApi.md | 84 +-- koyeb/api/docs/Organization.md | 1 + .../docs/RegionalDeploymentDefinitionType.md | 2 + koyeb/api/docs/ServiceType.md | 2 + .../api/models/deployment_definition_type.py | 1 + koyeb/api/models/organization.py | 4 +- .../regional_deployment_definition_type.py | 1 + koyeb/api/models/service_type.py | 1 + spec/openapi.json | 101 +++- 12 files changed, 787 insertions(+), 245 deletions(-) diff --git a/docs/api.md b/docs/api.md index 1b2e6d44..f1209b37 100644 --- a/docs/api.md +++ b/docs/api.md @@ -51201,13 +51201,55 @@ Do not edit the class manually. 
```python @validate_call def query_logs( - type: Optional[StrictStr] = None, - app_id: Optional[StrictStr] = None, - service_id: Optional[StrictStr] = None, - deployment_id: Optional[StrictStr] = None, - instance_id: Optional[StrictStr] = None, - stream: Optional[StrictStr] = None, - regional_deployment_id: Optional[StrictStr] = None, + type: Annotated[ + Optional[StrictStr], + Field( + description= + "Type of logs to retrieve, either \"build\" or \"runtime\". Defaults to \"runtime\"." + )] = None, + app_id: Annotated[ + Optional[StrictStr], + Field( + description= + "(Optional) Filter on the provided app_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set." + )] = None, + service_id: Annotated[ + Optional[StrictStr], + Field( + description= + "(Optional) Filter on the provided service_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set." + )] = None, + deployment_id: Annotated[ + Optional[StrictStr], + Field( + description= + "(Optional) Filter on the provided deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set." + )] = None, + regional_deployment_id: Annotated[ + Optional[StrictStr], + Field( + description= + "(Optional) Filter on the provided regional_deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set." + )] = None, + instance_id: Annotated[ + Optional[StrictStr], + Field(description="Deprecated, prefer using instance_ids instead." + )] = None, + instance_ids: Annotated[ + Optional[List[StrictStr]], + Field( + description= + "(Optional) Filter on the provided instance_ids. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set." 
+ )] = None, + stream: Annotated[ + Optional[StrictStr], + Field(description="Deprecated, prefer using streams instead.")] = None, + streams: Annotated[ + Optional[List[StrictStr]], + Field( + description= + "(Optional) Filter on stream: either \"stdout\", \"stderr\" or \"koyeb\" (for system logs)." + )] = None, start: Annotated[ Optional[datetime], Field( @@ -51239,6 +51281,12 @@ def query_logs( description= "(Optional) Looks for this string in logs. Can't be used with `regex`." )] = None, + regions: Annotated[ + Optional[List[StrictStr]], + Field( + description= + "(Optional) Filter on the provided regions (e.g. [\"fra\", \"was\"])." + )] = None, _request_timeout: Union[None, Annotated[StrictFloat, Field(gt=0)], Tuple[Annotated[StrictFloat, @@ -51256,19 +51304,22 @@ Query logs **Arguments**: -- `type` (`str`): -- `app_id` (`str`): -- `service_id` (`str`): -- `deployment_id` (`str`): -- `instance_id` (`str`): -- `stream` (`str`): -- `regional_deployment_id` (`str`): +- `type` (`str`): Type of logs to retrieve, either "build" or "runtime". Defaults to "runtime". +- `app_id` (`str`): (Optional) Filter on the provided app_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. +- `service_id` (`str`): (Optional) Filter on the provided service_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. +- `deployment_id` (`str`): (Optional) Filter on the provided deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. +- `regional_deployment_id` (`str`): (Optional) Filter on the provided regional_deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. +- `instance_id` (`str`): Deprecated, prefer using instance_ids instead. +- `instance_ids` (`List[str]`): (Optional) Filter on the provided instance_ids. 
At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. +- `stream` (`str`): Deprecated, prefer using streams instead. +- `streams` (`List[str]`): (Optional) Filter on stream: either "stdout", "stderr" or "koyeb" (for system logs). - `start` (`datetime`): (Optional) Must always be before `end`. Defaults to 15 minutes ago. - `end` (`datetime`): (Optional) Must always be after `start`. Defaults to now. - `order` (`str`): (Optional) `asc` or `desc`. Defaults to `desc`. - `limit` (`str`): (Optional) Defaults to 100. Maximum of 1000. - `regex` (`str`): (Optional) Apply a regex to filter logs. Can't be used with `text`. - `text` (`str`): (Optional) Looks for this string in logs. Can't be used with `regex`. +- `regions` (`List[str]`): (Optional) Filter on the provided regions (e.g. ["fra", "was"]). - `_request_timeout` (`int, tuple(int, int), optional`): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -51295,13 +51346,55 @@ Returns the result object. ```python @validate_call def query_logs_with_http_info( - type: Optional[StrictStr] = None, - app_id: Optional[StrictStr] = None, - service_id: Optional[StrictStr] = None, - deployment_id: Optional[StrictStr] = None, - instance_id: Optional[StrictStr] = None, - stream: Optional[StrictStr] = None, - regional_deployment_id: Optional[StrictStr] = None, + type: Annotated[ + Optional[StrictStr], + Field( + description= + "Type of logs to retrieve, either \"build\" or \"runtime\". Defaults to \"runtime\"." + )] = None, + app_id: Annotated[ + Optional[StrictStr], + Field( + description= + "(Optional) Filter on the provided app_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set." + )] = None, + service_id: Annotated[ + Optional[StrictStr], + Field( + description= + "(Optional) Filter on the provided service_id. 
At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set." + )] = None, + deployment_id: Annotated[ + Optional[StrictStr], + Field( + description= + "(Optional) Filter on the provided deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set." + )] = None, + regional_deployment_id: Annotated[ + Optional[StrictStr], + Field( + description= + "(Optional) Filter on the provided regional_deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set." + )] = None, + instance_id: Annotated[ + Optional[StrictStr], + Field(description="Deprecated, prefer using instance_ids instead." + )] = None, + instance_ids: Annotated[ + Optional[List[StrictStr]], + Field( + description= + "(Optional) Filter on the provided instance_ids. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set." + )] = None, + stream: Annotated[ + Optional[StrictStr], + Field(description="Deprecated, prefer using streams instead.")] = None, + streams: Annotated[ + Optional[List[StrictStr]], + Field( + description= + "(Optional) Filter on stream: either \"stdout\", \"stderr\" or \"koyeb\" (for system logs)." + )] = None, start: Annotated[ Optional[datetime], Field( @@ -51333,6 +51426,12 @@ def query_logs_with_http_info( description= "(Optional) Looks for this string in logs. Can't be used with `regex`." )] = None, + regions: Annotated[ + Optional[List[StrictStr]], + Field( + description= + "(Optional) Filter on the provided regions (e.g. [\"fra\", \"was\"])." 
+ )] = None, _request_timeout: Union[None, Annotated[StrictFloat, Field(gt=0)], Tuple[Annotated[StrictFloat, @@ -51350,19 +51449,22 @@ Query logs **Arguments**: -- `type` (`str`): -- `app_id` (`str`): -- `service_id` (`str`): -- `deployment_id` (`str`): -- `instance_id` (`str`): -- `stream` (`str`): -- `regional_deployment_id` (`str`): +- `type` (`str`): Type of logs to retrieve, either "build" or "runtime". Defaults to "runtime". +- `app_id` (`str`): (Optional) Filter on the provided app_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. +- `service_id` (`str`): (Optional) Filter on the provided service_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. +- `deployment_id` (`str`): (Optional) Filter on the provided deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. +- `regional_deployment_id` (`str`): (Optional) Filter on the provided regional_deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. +- `instance_id` (`str`): Deprecated, prefer using instance_ids instead. +- `instance_ids` (`List[str]`): (Optional) Filter on the provided instance_ids. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. +- `stream` (`str`): Deprecated, prefer using streams instead. +- `streams` (`List[str]`): (Optional) Filter on stream: either "stdout", "stderr" or "koyeb" (for system logs). - `start` (`datetime`): (Optional) Must always be before `end`. Defaults to 15 minutes ago. - `end` (`datetime`): (Optional) Must always be after `start`. Defaults to now. - `order` (`str`): (Optional) `asc` or `desc`. Defaults to `desc`. - `limit` (`str`): (Optional) Defaults to 100. Maximum of 1000. - `regex` (`str`): (Optional) Apply a regex to filter logs. Can't be used with `text`. 
- `text` (`str`): (Optional) Looks for this string in logs. Can't be used with `regex`. +- `regions` (`List[str]`): (Optional) Filter on the provided regions (e.g. ["fra", "was"]). - `_request_timeout` (`int, tuple(int, int), optional`): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -51389,13 +51491,55 @@ Returns the result object. ```python @validate_call def query_logs_without_preload_content( - type: Optional[StrictStr] = None, - app_id: Optional[StrictStr] = None, - service_id: Optional[StrictStr] = None, - deployment_id: Optional[StrictStr] = None, - instance_id: Optional[StrictStr] = None, - stream: Optional[StrictStr] = None, - regional_deployment_id: Optional[StrictStr] = None, + type: Annotated[ + Optional[StrictStr], + Field( + description= + "Type of logs to retrieve, either \"build\" or \"runtime\". Defaults to \"runtime\"." + )] = None, + app_id: Annotated[ + Optional[StrictStr], + Field( + description= + "(Optional) Filter on the provided app_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set." + )] = None, + service_id: Annotated[ + Optional[StrictStr], + Field( + description= + "(Optional) Filter on the provided service_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set." + )] = None, + deployment_id: Annotated[ + Optional[StrictStr], + Field( + description= + "(Optional) Filter on the provided deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set." + )] = None, + regional_deployment_id: Annotated[ + Optional[StrictStr], + Field( + description= + "(Optional) Filter on the provided regional_deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set." 
+ )] = None, + instance_id: Annotated[ + Optional[StrictStr], + Field(description="Deprecated, prefer using instance_ids instead." + )] = None, + instance_ids: Annotated[ + Optional[List[StrictStr]], + Field( + description= + "(Optional) Filter on the provided instance_ids. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set." + )] = None, + stream: Annotated[ + Optional[StrictStr], + Field(description="Deprecated, prefer using streams instead.")] = None, + streams: Annotated[ + Optional[List[StrictStr]], + Field( + description= + "(Optional) Filter on stream: either \"stdout\", \"stderr\" or \"koyeb\" (for system logs)." + )] = None, start: Annotated[ Optional[datetime], Field( @@ -51427,6 +51571,12 @@ def query_logs_without_preload_content( description= "(Optional) Looks for this string in logs. Can't be used with `regex`." )] = None, + regions: Annotated[ + Optional[List[StrictStr]], + Field( + description= + "(Optional) Filter on the provided regions (e.g. [\"fra\", \"was\"])." + )] = None, _request_timeout: Union[None, Annotated[StrictFloat, Field(gt=0)], Tuple[Annotated[StrictFloat, @@ -51444,19 +51594,22 @@ Query logs **Arguments**: -- `type` (`str`): -- `app_id` (`str`): -- `service_id` (`str`): -- `deployment_id` (`str`): -- `instance_id` (`str`): -- `stream` (`str`): -- `regional_deployment_id` (`str`): +- `type` (`str`): Type of logs to retrieve, either "build" or "runtime". Defaults to "runtime". +- `app_id` (`str`): (Optional) Filter on the provided app_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. +- `service_id` (`str`): (Optional) Filter on the provided service_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. +- `deployment_id` (`str`): (Optional) Filter on the provided deployment_id. 
At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. +- `regional_deployment_id` (`str`): (Optional) Filter on the provided regional_deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. +- `instance_id` (`str`): Deprecated, prefer using instance_ids instead. +- `instance_ids` (`List[str]`): (Optional) Filter on the provided instance_ids. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. +- `stream` (`str`): Deprecated, prefer using streams instead. +- `streams` (`List[str]`): (Optional) Filter on stream: either "stdout", "stderr" or "koyeb" (for system logs). - `start` (`datetime`): (Optional) Must always be before `end`. Defaults to 15 minutes ago. - `end` (`datetime`): (Optional) Must always be after `start`. Defaults to now. - `order` (`str`): (Optional) `asc` or `desc`. Defaults to `desc`. - `limit` (`str`): (Optional) Defaults to 100. Maximum of 1000. - `regex` (`str`): (Optional) Apply a regex to filter logs. Can't be used with `text`. - `text` (`str`): (Optional) Looks for this string in logs. Can't be used with `regex`. +- `regions` (`List[str]`): (Optional) Filter on the provided regions (e.g. ["fra", "was"]). - `_request_timeout` (`int, tuple(int, int), optional`): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -51483,15 +51636,62 @@ Returns the result object. 
```python @validate_call def tail_logs( - type: Optional[StrictStr] = None, - app_id: Optional[StrictStr] = None, - service_id: Optional[StrictStr] = None, - deployment_id: Optional[StrictStr] = None, - regional_deployment_id: Optional[StrictStr] = None, - instance_id: Optional[StrictStr] = None, - stream: Optional[StrictStr] = None, - start: Optional[datetime] = None, - limit: Optional[StrictStr] = None, + type: Annotated[ + Optional[StrictStr], + Field( + description= + "Type of logs to retrieve, either \"build\" or \"runtime\". Defaults to \"runtime\"." + )] = None, + app_id: Annotated[ + Optional[StrictStr], + Field( + description= + "(Optional) Filter on the provided app_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set." + )] = None, + service_id: Annotated[ + Optional[StrictStr], + Field( + description= + "(Optional) Filter on the provided service_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set." + )] = None, + deployment_id: Annotated[ + Optional[StrictStr], + Field( + description= + "(Optional) Filter on the provided deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set." + )] = None, + regional_deployment_id: Annotated[ + Optional[StrictStr], + Field( + description= + "(Optional) Filter on the provided regional_deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set." + )] = None, + instance_id: Annotated[ + Optional[StrictStr], + Field(description="Deprecated, prefer using instance_ids instead." + )] = None, + instance_ids: Annotated[ + Optional[List[StrictStr]], + Field( + description= + "(Optional) Filter on the provided instance_ids. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set." 
+ )] = None, + stream: Annotated[ + Optional[StrictStr], + Field(description="Deprecated, prefer using streams instead.")] = None, + streams: Annotated[ + Optional[List[StrictStr]], + Field( + description= + "(Optional) Filter on stream: either \"stdout\", \"stderr\" or \"koyeb\" (for system logs)." + )] = None, + start: Annotated[ + Optional[datetime], + Field(description="(Optional) Defaults to 24 hours ago.")] = None, + limit: Annotated[ + Optional[StrictStr], + Field(description="(Optional) Defaults to 1000. Maximum of 1000." + )] = None, regex: Annotated[ Optional[StrictStr], Field( @@ -51504,6 +51704,12 @@ def tail_logs( description= "(Optional) Looks for this string in logs. Can't be used with `regex`." )] = None, + regions: Annotated[ + Optional[List[StrictStr]], + Field( + description= + "(Optional) Filter on the provided regions (e.g. [\"fra\", \"was\"])." + )] = None, _request_timeout: Union[None, Annotated[StrictFloat, Field(gt=0)], Tuple[Annotated[StrictFloat, @@ -51521,17 +51727,20 @@ Tails logs **Arguments**: -- `type` (`str`): -- `app_id` (`str`): -- `service_id` (`str`): -- `deployment_id` (`str`): -- `regional_deployment_id` (`str`): -- `instance_id` (`str`): -- `stream` (`str`): -- `start` (`datetime`): -- `limit` (`str`): +- `type` (`str`): Type of logs to retrieve, either "build" or "runtime". Defaults to "runtime". +- `app_id` (`str`): (Optional) Filter on the provided app_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. +- `service_id` (`str`): (Optional) Filter on the provided service_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. +- `deployment_id` (`str`): (Optional) Filter on the provided deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. +- `regional_deployment_id` (`str`): (Optional) Filter on the provided regional_deployment_id. 
At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. +- `instance_id` (`str`): Deprecated, prefer using instance_ids instead. +- `instance_ids` (`List[str]`): (Optional) Filter on the provided instance_ids. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. +- `stream` (`str`): Deprecated, prefer using streams instead. +- `streams` (`List[str]`): (Optional) Filter on stream: either "stdout", "stderr" or "koyeb" (for system logs). +- `start` (`datetime`): (Optional) Defaults to 24 hours ago. +- `limit` (`str`): (Optional) Defaults to 1000. Maximum of 1000. - `regex` (`str`): (Optional) Apply a regex to filter logs. Can't be used with `text`. - `text` (`str`): (Optional) Looks for this string in logs. Can't be used with `regex`. +- `regions` (`List[str]`): (Optional) Filter on the provided regions (e.g. ["fra", "was"]). - `_request_timeout` (`int, tuple(int, int), optional`): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -51558,15 +51767,62 @@ Returns the result object. ```python @validate_call def tail_logs_with_http_info( - type: Optional[StrictStr] = None, - app_id: Optional[StrictStr] = None, - service_id: Optional[StrictStr] = None, - deployment_id: Optional[StrictStr] = None, - regional_deployment_id: Optional[StrictStr] = None, - instance_id: Optional[StrictStr] = None, - stream: Optional[StrictStr] = None, - start: Optional[datetime] = None, - limit: Optional[StrictStr] = None, + type: Annotated[ + Optional[StrictStr], + Field( + description= + "Type of logs to retrieve, either \"build\" or \"runtime\". Defaults to \"runtime\"." + )] = None, + app_id: Annotated[ + Optional[StrictStr], + Field( + description= + "(Optional) Filter on the provided app_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set." 
+ )] = None, + service_id: Annotated[ + Optional[StrictStr], + Field( + description= + "(Optional) Filter on the provided service_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set." + )] = None, + deployment_id: Annotated[ + Optional[StrictStr], + Field( + description= + "(Optional) Filter on the provided deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set." + )] = None, + regional_deployment_id: Annotated[ + Optional[StrictStr], + Field( + description= + "(Optional) Filter on the provided regional_deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set." + )] = None, + instance_id: Annotated[ + Optional[StrictStr], + Field(description="Deprecated, prefer using instance_ids instead." + )] = None, + instance_ids: Annotated[ + Optional[List[StrictStr]], + Field( + description= + "(Optional) Filter on the provided instance_ids. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set." + )] = None, + stream: Annotated[ + Optional[StrictStr], + Field(description="Deprecated, prefer using streams instead.")] = None, + streams: Annotated[ + Optional[List[StrictStr]], + Field( + description= + "(Optional) Filter on stream: either \"stdout\", \"stderr\" or \"koyeb\" (for system logs)." + )] = None, + start: Annotated[ + Optional[datetime], + Field(description="(Optional) Defaults to 24 hours ago.")] = None, + limit: Annotated[ + Optional[StrictStr], + Field(description="(Optional) Defaults to 1000. Maximum of 1000." + )] = None, regex: Annotated[ Optional[StrictStr], Field( @@ -51579,6 +51835,12 @@ def tail_logs_with_http_info( description= "(Optional) Looks for this string in logs. Can't be used with `regex`." )] = None, + regions: Annotated[ + Optional[List[StrictStr]], + Field( + description= + "(Optional) Filter on the provided regions (e.g. 
[\"fra\", \"was\"])." + )] = None, _request_timeout: Union[None, Annotated[StrictFloat, Field(gt=0)], Tuple[Annotated[StrictFloat, @@ -51596,17 +51858,20 @@ Tails logs **Arguments**: -- `type` (`str`): -- `app_id` (`str`): -- `service_id` (`str`): -- `deployment_id` (`str`): -- `regional_deployment_id` (`str`): -- `instance_id` (`str`): -- `stream` (`str`): -- `start` (`datetime`): -- `limit` (`str`): +- `type` (`str`): Type of logs to retrieve, either "build" or "runtime". Defaults to "runtime". +- `app_id` (`str`): (Optional) Filter on the provided app_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. +- `service_id` (`str`): (Optional) Filter on the provided service_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. +- `deployment_id` (`str`): (Optional) Filter on the provided deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. +- `regional_deployment_id` (`str`): (Optional) Filter on the provided regional_deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. +- `instance_id` (`str`): Deprecated, prefer using instance_ids instead. +- `instance_ids` (`List[str]`): (Optional) Filter on the provided instance_ids. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. +- `stream` (`str`): Deprecated, prefer using streams instead. +- `streams` (`List[str]`): (Optional) Filter on stream: either "stdout", "stderr" or "koyeb" (for system logs). +- `start` (`datetime`): (Optional) Defaults to 24 hours ago. +- `limit` (`str`): (Optional) Defaults to 1000. Maximum of 1000. - `regex` (`str`): (Optional) Apply a regex to filter logs. Can't be used with `text`. - `text` (`str`): (Optional) Looks for this string in logs. Can't be used with `regex`. 
+- `regions` (`List[str]`): (Optional) Filter on the provided regions (e.g. ["fra", "was"]). - `_request_timeout` (`int, tuple(int, int), optional`): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -51633,15 +51898,62 @@ Returns the result object. ```python @validate_call def tail_logs_without_preload_content( - type: Optional[StrictStr] = None, - app_id: Optional[StrictStr] = None, - service_id: Optional[StrictStr] = None, - deployment_id: Optional[StrictStr] = None, - regional_deployment_id: Optional[StrictStr] = None, - instance_id: Optional[StrictStr] = None, - stream: Optional[StrictStr] = None, - start: Optional[datetime] = None, - limit: Optional[StrictStr] = None, + type: Annotated[ + Optional[StrictStr], + Field( + description= + "Type of logs to retrieve, either \"build\" or \"runtime\". Defaults to \"runtime\"." + )] = None, + app_id: Annotated[ + Optional[StrictStr], + Field( + description= + "(Optional) Filter on the provided app_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set." + )] = None, + service_id: Annotated[ + Optional[StrictStr], + Field( + description= + "(Optional) Filter on the provided service_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set." + )] = None, + deployment_id: Annotated[ + Optional[StrictStr], + Field( + description= + "(Optional) Filter on the provided deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set." + )] = None, + regional_deployment_id: Annotated[ + Optional[StrictStr], + Field( + description= + "(Optional) Filter on the provided regional_deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set." 
+ )] = None, + instance_id: Annotated[ + Optional[StrictStr], + Field(description="Deprecated, prefer using instance_ids instead." + )] = None, + instance_ids: Annotated[ + Optional[List[StrictStr]], + Field( + description= + "(Optional) Filter on the provided instance_ids. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set." + )] = None, + stream: Annotated[ + Optional[StrictStr], + Field(description="Deprecated, prefer using streams instead.")] = None, + streams: Annotated[ + Optional[List[StrictStr]], + Field( + description= + "(Optional) Filter on stream: either \"stdout\", \"stderr\" or \"koyeb\" (for system logs)." + )] = None, + start: Annotated[ + Optional[datetime], + Field(description="(Optional) Defaults to 24 hours ago.")] = None, + limit: Annotated[ + Optional[StrictStr], + Field(description="(Optional) Defaults to 1000. Maximum of 1000." + )] = None, regex: Annotated[ Optional[StrictStr], Field( @@ -51654,6 +51966,12 @@ def tail_logs_without_preload_content( description= "(Optional) Looks for this string in logs. Can't be used with `regex`." )] = None, + regions: Annotated[ + Optional[List[StrictStr]], + Field( + description= + "(Optional) Filter on the provided regions (e.g. [\"fra\", \"was\"])." + )] = None, _request_timeout: Union[None, Annotated[StrictFloat, Field(gt=0)], Tuple[Annotated[StrictFloat, @@ -51671,17 +51989,20 @@ Tails logs **Arguments**: -- `type` (`str`): -- `app_id` (`str`): -- `service_id` (`str`): -- `deployment_id` (`str`): -- `regional_deployment_id` (`str`): -- `instance_id` (`str`): -- `stream` (`str`): -- `start` (`datetime`): -- `limit` (`str`): +- `type` (`str`): Type of logs to retrieve, either "build" or "runtime". Defaults to "runtime". +- `app_id` (`str`): (Optional) Filter on the provided app_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. +- `service_id` (`str`): (Optional) Filter on the provided service_id. 
At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. +- `deployment_id` (`str`): (Optional) Filter on the provided deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. +- `regional_deployment_id` (`str`): (Optional) Filter on the provided regional_deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. +- `instance_id` (`str`): Deprecated, prefer using instance_ids instead. +- `instance_ids` (`List[str]`): (Optional) Filter on the provided instance_ids. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. +- `stream` (`str`): Deprecated, prefer using streams instead. +- `streams` (`List[str]`): (Optional) Filter on stream: either "stdout", "stderr" or "koyeb" (for system logs). +- `start` (`datetime`): (Optional) Defaults to 24 hours ago. +- `limit` (`str`): (Optional) Defaults to 1000. Maximum of 1000. - `regex` (`str`): (Optional) Apply a regex to filter logs. Can't be used with `text`. - `text` (`str`): (Optional) Looks for this string in logs. Can't be used with `regex`. +- `regions` (`List[str]`): (Optional) Filter on the provided regions (e.g. ["fra", "was"]). - `_request_timeout` (`int, tuple(int, int), optional`): timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of diff --git a/koyeb/api/api/logs_api.py b/koyeb/api/api/logs_api.py index df6433b0..cfc7de79 100644 --- a/koyeb/api/api/logs_api.py +++ b/koyeb/api/api/logs_api.py @@ -18,7 +18,7 @@ from datetime import datetime from pydantic import Field, StrictStr -from typing import Optional +from typing import List, Optional from typing_extensions import Annotated from koyeb.api.models.query_logs_reply import QueryLogsReply from koyeb.api.models.stream_result_of_log_entry import StreamResultOfLogEntry @@ -44,19 +44,22 @@ def __init__(self, api_client=None) -> None: @validate_call def query_logs( self, - type: Optional[StrictStr] = None, - app_id: Optional[StrictStr] = None, - service_id: Optional[StrictStr] = None, - deployment_id: Optional[StrictStr] = None, - instance_id: Optional[StrictStr] = None, - stream: Optional[StrictStr] = None, - regional_deployment_id: Optional[StrictStr] = None, + type: Annotated[Optional[StrictStr], Field(description="Type of logs to retrieve, either \"build\" or \"runtime\". Defaults to \"runtime\".")] = None, + app_id: Annotated[Optional[StrictStr], Field(description="(Optional) Filter on the provided app_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.")] = None, + service_id: Annotated[Optional[StrictStr], Field(description="(Optional) Filter on the provided service_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.")] = None, + deployment_id: Annotated[Optional[StrictStr], Field(description="(Optional) Filter on the provided deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.")] = None, + regional_deployment_id: Annotated[Optional[StrictStr], Field(description="(Optional) Filter on the provided regional_deployment_id. 
At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.")] = None, + instance_id: Annotated[Optional[StrictStr], Field(description="Deprecated, prefer using instance_ids instead.")] = None, + instance_ids: Annotated[Optional[List[StrictStr]], Field(description="(Optional) Filter on the provided instance_ids. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.")] = None, + stream: Annotated[Optional[StrictStr], Field(description="Deprecated, prefer using streams instead.")] = None, + streams: Annotated[Optional[List[StrictStr]], Field(description="(Optional) Filter on stream: either \"stdout\", \"stderr\" or \"koyeb\" (for system logs).")] = None, start: Annotated[Optional[datetime], Field(description="(Optional) Must always be before `end`. Defaults to 15 minutes ago.")] = None, end: Annotated[Optional[datetime], Field(description="(Optional) Must always be after `start`. Defaults to now.")] = None, order: Annotated[Optional[StrictStr], Field(description="(Optional) `asc` or `desc`. Defaults to `desc`.")] = None, limit: Annotated[Optional[StrictStr], Field(description="(Optional) Defaults to 100. Maximum of 1000.")] = None, regex: Annotated[Optional[StrictStr], Field(description="(Optional) Apply a regex to filter logs. Can't be used with `text`.")] = None, text: Annotated[Optional[StrictStr], Field(description="(Optional) Looks for this string in logs. Can't be used with `regex`.")] = None, + regions: Annotated[Optional[List[StrictStr]], Field(description="(Optional) Filter on the provided regions (e.g. [\"fra\", \"was\"]).")] = None, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -73,20 +76,24 @@ def query_logs( """Query logs - :param type: + :param type: Type of logs to retrieve, either \"build\" or \"runtime\". Defaults to \"runtime\". :type type: str - :param app_id: + :param app_id: (Optional) Filter on the provided app_id. 
At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. :type app_id: str - :param service_id: + :param service_id: (Optional) Filter on the provided service_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. :type service_id: str - :param deployment_id: + :param deployment_id: (Optional) Filter on the provided deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. :type deployment_id: str - :param instance_id: + :param regional_deployment_id: (Optional) Filter on the provided regional_deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. + :type regional_deployment_id: str + :param instance_id: Deprecated, prefer using instance_ids instead. :type instance_id: str - :param stream: + :param instance_ids: (Optional) Filter on the provided instance_ids. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. + :type instance_ids: List[str] + :param stream: Deprecated, prefer using streams instead. :type stream: str - :param regional_deployment_id: - :type regional_deployment_id: str + :param streams: (Optional) Filter on stream: either \"stdout\", \"stderr\" or \"koyeb\" (for system logs). + :type streams: List[str] :param start: (Optional) Must always be before `end`. Defaults to 15 minutes ago. :type start: datetime :param end: (Optional) Must always be after `start`. Defaults to now. @@ -99,6 +106,8 @@ def query_logs( :type regex: str :param text: (Optional) Looks for this string in logs. Can't be used with `regex`. :type text: str + :param regions: (Optional) Filter on the provided regions (e.g. [\"fra\", \"was\"]). + :type regions: List[str] :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -126,15 +135,18 @@ def query_logs( app_id=app_id, service_id=service_id, deployment_id=deployment_id, + regional_deployment_id=regional_deployment_id, instance_id=instance_id, + instance_ids=instance_ids, stream=stream, - regional_deployment_id=regional_deployment_id, + streams=streams, start=start, end=end, order=order, limit=limit, regex=regex, text=text, + regions=regions, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -164,19 +176,22 @@ def query_logs( @validate_call def query_logs_with_http_info( self, - type: Optional[StrictStr] = None, - app_id: Optional[StrictStr] = None, - service_id: Optional[StrictStr] = None, - deployment_id: Optional[StrictStr] = None, - instance_id: Optional[StrictStr] = None, - stream: Optional[StrictStr] = None, - regional_deployment_id: Optional[StrictStr] = None, + type: Annotated[Optional[StrictStr], Field(description="Type of logs to retrieve, either \"build\" or \"runtime\". Defaults to \"runtime\".")] = None, + app_id: Annotated[Optional[StrictStr], Field(description="(Optional) Filter on the provided app_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.")] = None, + service_id: Annotated[Optional[StrictStr], Field(description="(Optional) Filter on the provided service_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.")] = None, + deployment_id: Annotated[Optional[StrictStr], Field(description="(Optional) Filter on the provided deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.")] = None, + regional_deployment_id: Annotated[Optional[StrictStr], Field(description="(Optional) Filter on the provided regional_deployment_id. 
At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.")] = None, + instance_id: Annotated[Optional[StrictStr], Field(description="Deprecated, prefer using instance_ids instead.")] = None, + instance_ids: Annotated[Optional[List[StrictStr]], Field(description="(Optional) Filter on the provided instance_ids. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.")] = None, + stream: Annotated[Optional[StrictStr], Field(description="Deprecated, prefer using streams instead.")] = None, + streams: Annotated[Optional[List[StrictStr]], Field(description="(Optional) Filter on stream: either \"stdout\", \"stderr\" or \"koyeb\" (for system logs).")] = None, start: Annotated[Optional[datetime], Field(description="(Optional) Must always be before `end`. Defaults to 15 minutes ago.")] = None, end: Annotated[Optional[datetime], Field(description="(Optional) Must always be after `start`. Defaults to now.")] = None, order: Annotated[Optional[StrictStr], Field(description="(Optional) `asc` or `desc`. Defaults to `desc`.")] = None, limit: Annotated[Optional[StrictStr], Field(description="(Optional) Defaults to 100. Maximum of 1000.")] = None, regex: Annotated[Optional[StrictStr], Field(description="(Optional) Apply a regex to filter logs. Can't be used with `text`.")] = None, text: Annotated[Optional[StrictStr], Field(description="(Optional) Looks for this string in logs. Can't be used with `regex`.")] = None, + regions: Annotated[Optional[List[StrictStr]], Field(description="(Optional) Filter on the provided regions (e.g. [\"fra\", \"was\"]).")] = None, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -193,20 +208,24 @@ def query_logs_with_http_info( """Query logs - :param type: + :param type: Type of logs to retrieve, either \"build\" or \"runtime\". Defaults to \"runtime\". 
:type type: str - :param app_id: + :param app_id: (Optional) Filter on the provided app_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. :type app_id: str - :param service_id: + :param service_id: (Optional) Filter on the provided service_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. :type service_id: str - :param deployment_id: + :param deployment_id: (Optional) Filter on the provided deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. :type deployment_id: str - :param instance_id: + :param regional_deployment_id: (Optional) Filter on the provided regional_deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. + :type regional_deployment_id: str + :param instance_id: Deprecated, prefer using instance_ids instead. :type instance_id: str - :param stream: + :param instance_ids: (Optional) Filter on the provided instance_ids. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. + :type instance_ids: List[str] + :param stream: Deprecated, prefer using streams instead. :type stream: str - :param regional_deployment_id: - :type regional_deployment_id: str + :param streams: (Optional) Filter on stream: either \"stdout\", \"stderr\" or \"koyeb\" (for system logs). + :type streams: List[str] :param start: (Optional) Must always be before `end`. Defaults to 15 minutes ago. :type start: datetime :param end: (Optional) Must always be after `start`. Defaults to now. @@ -219,6 +238,8 @@ def query_logs_with_http_info( :type regex: str :param text: (Optional) Looks for this string in logs. Can't be used with `regex`. :type text: str + :param regions: (Optional) Filter on the provided regions (e.g. [\"fra\", \"was\"]). 
+ :type regions: List[str] :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -246,15 +267,18 @@ def query_logs_with_http_info( app_id=app_id, service_id=service_id, deployment_id=deployment_id, + regional_deployment_id=regional_deployment_id, instance_id=instance_id, + instance_ids=instance_ids, stream=stream, - regional_deployment_id=regional_deployment_id, + streams=streams, start=start, end=end, order=order, limit=limit, regex=regex, text=text, + regions=regions, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -284,19 +308,22 @@ def query_logs_with_http_info( @validate_call def query_logs_without_preload_content( self, - type: Optional[StrictStr] = None, - app_id: Optional[StrictStr] = None, - service_id: Optional[StrictStr] = None, - deployment_id: Optional[StrictStr] = None, - instance_id: Optional[StrictStr] = None, - stream: Optional[StrictStr] = None, - regional_deployment_id: Optional[StrictStr] = None, + type: Annotated[Optional[StrictStr], Field(description="Type of logs to retrieve, either \"build\" or \"runtime\". Defaults to \"runtime\".")] = None, + app_id: Annotated[Optional[StrictStr], Field(description="(Optional) Filter on the provided app_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.")] = None, + service_id: Annotated[Optional[StrictStr], Field(description="(Optional) Filter on the provided service_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.")] = None, + deployment_id: Annotated[Optional[StrictStr], Field(description="(Optional) Filter on the provided deployment_id. 
At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.")] = None, + regional_deployment_id: Annotated[Optional[StrictStr], Field(description="(Optional) Filter on the provided regional_deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.")] = None, + instance_id: Annotated[Optional[StrictStr], Field(description="Deprecated, prefer using instance_ids instead.")] = None, + instance_ids: Annotated[Optional[List[StrictStr]], Field(description="(Optional) Filter on the provided instance_ids. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.")] = None, + stream: Annotated[Optional[StrictStr], Field(description="Deprecated, prefer using streams instead.")] = None, + streams: Annotated[Optional[List[StrictStr]], Field(description="(Optional) Filter on stream: either \"stdout\", \"stderr\" or \"koyeb\" (for system logs).")] = None, start: Annotated[Optional[datetime], Field(description="(Optional) Must always be before `end`. Defaults to 15 minutes ago.")] = None, end: Annotated[Optional[datetime], Field(description="(Optional) Must always be after `start`. Defaults to now.")] = None, order: Annotated[Optional[StrictStr], Field(description="(Optional) `asc` or `desc`. Defaults to `desc`.")] = None, limit: Annotated[Optional[StrictStr], Field(description="(Optional) Defaults to 100. Maximum of 1000.")] = None, regex: Annotated[Optional[StrictStr], Field(description="(Optional) Apply a regex to filter logs. Can't be used with `text`.")] = None, text: Annotated[Optional[StrictStr], Field(description="(Optional) Looks for this string in logs. Can't be used with `regex`.")] = None, + regions: Annotated[Optional[List[StrictStr]], Field(description="(Optional) Filter on the provided regions (e.g. 
[\"fra\", \"was\"]).")] = None, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -313,20 +340,24 @@ def query_logs_without_preload_content( """Query logs - :param type: + :param type: Type of logs to retrieve, either \"build\" or \"runtime\". Defaults to \"runtime\". :type type: str - :param app_id: + :param app_id: (Optional) Filter on the provided app_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. :type app_id: str - :param service_id: + :param service_id: (Optional) Filter on the provided service_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. :type service_id: str - :param deployment_id: + :param deployment_id: (Optional) Filter on the provided deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. :type deployment_id: str - :param instance_id: + :param regional_deployment_id: (Optional) Filter on the provided regional_deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. + :type regional_deployment_id: str + :param instance_id: Deprecated, prefer using instance_ids instead. :type instance_id: str - :param stream: + :param instance_ids: (Optional) Filter on the provided instance_ids. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. + :type instance_ids: List[str] + :param stream: Deprecated, prefer using streams instead. :type stream: str - :param regional_deployment_id: - :type regional_deployment_id: str + :param streams: (Optional) Filter on stream: either \"stdout\", \"stderr\" or \"koyeb\" (for system logs). + :type streams: List[str] :param start: (Optional) Must always be before `end`. Defaults to 15 minutes ago. :type start: datetime :param end: (Optional) Must always be after `start`. Defaults to now. 
@@ -339,6 +370,8 @@ def query_logs_without_preload_content( :type regex: str :param text: (Optional) Looks for this string in logs. Can't be used with `regex`. :type text: str + :param regions: (Optional) Filter on the provided regions (e.g. [\"fra\", \"was\"]). + :type regions: List[str] :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -366,15 +399,18 @@ def query_logs_without_preload_content( app_id=app_id, service_id=service_id, deployment_id=deployment_id, + regional_deployment_id=regional_deployment_id, instance_id=instance_id, + instance_ids=instance_ids, stream=stream, - regional_deployment_id=regional_deployment_id, + streams=streams, start=start, end=end, order=order, limit=limit, regex=regex, text=text, + regions=regions, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -403,15 +439,18 @@ def _query_logs_serialize( app_id, service_id, deployment_id, + regional_deployment_id, instance_id, + instance_ids, stream, - regional_deployment_id, + streams, start, end, order, limit, regex, text, + regions, _request_auth, _content_type, _headers, @@ -421,6 +460,9 @@ def _query_logs_serialize( _host = None _collection_formats: Dict[str, str] = { + 'instance_ids': 'multi', + 'streams': 'multi', + 'regions': 'multi', } _path_params: Dict[str, str] = {} @@ -450,17 +492,25 @@ def _query_logs_serialize( _query_params.append(('deployment_id', deployment_id)) + if regional_deployment_id is not None: + + _query_params.append(('regional_deployment_id', regional_deployment_id)) + if instance_id is not None: _query_params.append(('instance_id', instance_id)) + if instance_ids is not None: + + _query_params.append(('instance_ids', instance_ids)) + if stream is not None: _query_params.append(('stream', stream)) - if regional_deployment_id is not None: + if streams is not None: - _query_params.append(('regional_deployment_id', 
regional_deployment_id)) + _query_params.append(('streams', streams)) if start is not None: if isinstance(start, datetime): @@ -504,6 +554,10 @@ def _query_logs_serialize( _query_params.append(('text', text)) + if regions is not None: + + _query_params.append(('regions', regions)) + # process the header parameters # process the form parameters # process the body parameter @@ -544,17 +598,20 @@ def _query_logs_serialize( @validate_call def tail_logs( self, - type: Optional[StrictStr] = None, - app_id: Optional[StrictStr] = None, - service_id: Optional[StrictStr] = None, - deployment_id: Optional[StrictStr] = None, - regional_deployment_id: Optional[StrictStr] = None, - instance_id: Optional[StrictStr] = None, - stream: Optional[StrictStr] = None, - start: Optional[datetime] = None, - limit: Optional[StrictStr] = None, + type: Annotated[Optional[StrictStr], Field(description="Type of logs to retrieve, either \"build\" or \"runtime\". Defaults to \"runtime\".")] = None, + app_id: Annotated[Optional[StrictStr], Field(description="(Optional) Filter on the provided app_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.")] = None, + service_id: Annotated[Optional[StrictStr], Field(description="(Optional) Filter on the provided service_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.")] = None, + deployment_id: Annotated[Optional[StrictStr], Field(description="(Optional) Filter on the provided deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.")] = None, + regional_deployment_id: Annotated[Optional[StrictStr], Field(description="(Optional) Filter on the provided regional_deployment_id. 
At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.")] = None, + instance_id: Annotated[Optional[StrictStr], Field(description="Deprecated, prefer using instance_ids instead.")] = None, + instance_ids: Annotated[Optional[List[StrictStr]], Field(description="(Optional) Filter on the provided instance_ids. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.")] = None, + stream: Annotated[Optional[StrictStr], Field(description="Deprecated, prefer using streams instead.")] = None, + streams: Annotated[Optional[List[StrictStr]], Field(description="(Optional) Filter on stream: either \"stdout\", \"stderr\" or \"koyeb\" (for system logs).")] = None, + start: Annotated[Optional[datetime], Field(description="(Optional) Defaults to 24 hours ago.")] = None, + limit: Annotated[Optional[StrictStr], Field(description="(Optional) Defaults to 1000. Maximum of 1000.")] = None, regex: Annotated[Optional[StrictStr], Field(description="(Optional) Apply a regex to filter logs. Can't be used with `text`.")] = None, text: Annotated[Optional[StrictStr], Field(description="(Optional) Looks for this string in logs. Can't be used with `regex`.")] = None, + regions: Annotated[Optional[List[StrictStr]], Field(description="(Optional) Filter on the provided regions (e.g. [\"fra\", \"was\"]).")] = None, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -571,28 +628,34 @@ def tail_logs( """Tails logs - :param type: + :param type: Type of logs to retrieve, either \"build\" or \"runtime\". Defaults to \"runtime\". :type type: str - :param app_id: + :param app_id: (Optional) Filter on the provided app_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. :type app_id: str - :param service_id: + :param service_id: (Optional) Filter on the provided service_id. 
At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. :type service_id: str - :param deployment_id: + :param deployment_id: (Optional) Filter on the provided deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. :type deployment_id: str - :param regional_deployment_id: + :param regional_deployment_id: (Optional) Filter on the provided regional_deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. :type regional_deployment_id: str - :param instance_id: + :param instance_id: Deprecated, prefer using instance_ids instead. :type instance_id: str - :param stream: + :param instance_ids: (Optional) Filter on the provided instance_ids. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. + :type instance_ids: List[str] + :param stream: Deprecated, prefer using streams instead. :type stream: str - :param start: + :param streams: (Optional) Filter on stream: either \"stdout\", \"stderr\" or \"koyeb\" (for system logs). + :type streams: List[str] + :param start: (Optional) Defaults to 24 hours ago. :type start: datetime - :param limit: + :param limit: (Optional) Defaults to 1000. Maximum of 1000. :type limit: str :param regex: (Optional) Apply a regex to filter logs. Can't be used with `text`. :type regex: str :param text: (Optional) Looks for this string in logs. Can't be used with `regex`. :type text: str + :param regions: (Optional) Filter on the provided regions (e.g. [\"fra\", \"was\"]). + :type regions: List[str] :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -622,11 +685,14 @@ def tail_logs( deployment_id=deployment_id, regional_deployment_id=regional_deployment_id, instance_id=instance_id, + instance_ids=instance_ids, stream=stream, + streams=streams, start=start, limit=limit, regex=regex, text=text, + regions=regions, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -656,17 +722,20 @@ def tail_logs( @validate_call def tail_logs_with_http_info( self, - type: Optional[StrictStr] = None, - app_id: Optional[StrictStr] = None, - service_id: Optional[StrictStr] = None, - deployment_id: Optional[StrictStr] = None, - regional_deployment_id: Optional[StrictStr] = None, - instance_id: Optional[StrictStr] = None, - stream: Optional[StrictStr] = None, - start: Optional[datetime] = None, - limit: Optional[StrictStr] = None, + type: Annotated[Optional[StrictStr], Field(description="Type of logs to retrieve, either \"build\" or \"runtime\". Defaults to \"runtime\".")] = None, + app_id: Annotated[Optional[StrictStr], Field(description="(Optional) Filter on the provided app_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.")] = None, + service_id: Annotated[Optional[StrictStr], Field(description="(Optional) Filter on the provided service_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.")] = None, + deployment_id: Annotated[Optional[StrictStr], Field(description="(Optional) Filter on the provided deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.")] = None, + regional_deployment_id: Annotated[Optional[StrictStr], Field(description="(Optional) Filter on the provided regional_deployment_id. 
At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.")] = None, + instance_id: Annotated[Optional[StrictStr], Field(description="Deprecated, prefer using instance_ids instead.")] = None, + instance_ids: Annotated[Optional[List[StrictStr]], Field(description="(Optional) Filter on the provided instance_ids. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.")] = None, + stream: Annotated[Optional[StrictStr], Field(description="Deprecated, prefer using streams instead.")] = None, + streams: Annotated[Optional[List[StrictStr]], Field(description="(Optional) Filter on stream: either \"stdout\", \"stderr\" or \"koyeb\" (for system logs).")] = None, + start: Annotated[Optional[datetime], Field(description="(Optional) Defaults to 24 hours ago.")] = None, + limit: Annotated[Optional[StrictStr], Field(description="(Optional) Defaults to 1000. Maximum of 1000.")] = None, regex: Annotated[Optional[StrictStr], Field(description="(Optional) Apply a regex to filter logs. Can't be used with `text`.")] = None, text: Annotated[Optional[StrictStr], Field(description="(Optional) Looks for this string in logs. Can't be used with `regex`.")] = None, + regions: Annotated[Optional[List[StrictStr]], Field(description="(Optional) Filter on the provided regions (e.g. [\"fra\", \"was\"]).")] = None, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -683,28 +752,34 @@ def tail_logs_with_http_info( """Tails logs - :param type: + :param type: Type of logs to retrieve, either \"build\" or \"runtime\". Defaults to \"runtime\". :type type: str - :param app_id: + :param app_id: (Optional) Filter on the provided app_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. :type app_id: str - :param service_id: + :param service_id: (Optional) Filter on the provided service_id. 
At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. :type service_id: str - :param deployment_id: + :param deployment_id: (Optional) Filter on the provided deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. :type deployment_id: str - :param regional_deployment_id: + :param regional_deployment_id: (Optional) Filter on the provided regional_deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. :type regional_deployment_id: str - :param instance_id: + :param instance_id: Deprecated, prefer using instance_ids instead. :type instance_id: str - :param stream: + :param instance_ids: (Optional) Filter on the provided instance_ids. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. + :type instance_ids: List[str] + :param stream: Deprecated, prefer using streams instead. :type stream: str - :param start: + :param streams: (Optional) Filter on stream: either \"stdout\", \"stderr\" or \"koyeb\" (for system logs). + :type streams: List[str] + :param start: (Optional) Defaults to 24 hours ago. :type start: datetime - :param limit: + :param limit: (Optional) Defaults to 1000. Maximum of 1000. :type limit: str :param regex: (Optional) Apply a regex to filter logs. Can't be used with `text`. :type regex: str :param text: (Optional) Looks for this string in logs. Can't be used with `regex`. :type text: str + :param regions: (Optional) Filter on the provided regions (e.g. [\"fra\", \"was\"]). + :type regions: List[str] :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -734,11 +809,14 @@ def tail_logs_with_http_info( deployment_id=deployment_id, regional_deployment_id=regional_deployment_id, instance_id=instance_id, + instance_ids=instance_ids, stream=stream, + streams=streams, start=start, limit=limit, regex=regex, text=text, + regions=regions, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -768,17 +846,20 @@ def tail_logs_with_http_info( @validate_call def tail_logs_without_preload_content( self, - type: Optional[StrictStr] = None, - app_id: Optional[StrictStr] = None, - service_id: Optional[StrictStr] = None, - deployment_id: Optional[StrictStr] = None, - regional_deployment_id: Optional[StrictStr] = None, - instance_id: Optional[StrictStr] = None, - stream: Optional[StrictStr] = None, - start: Optional[datetime] = None, - limit: Optional[StrictStr] = None, + type: Annotated[Optional[StrictStr], Field(description="Type of logs to retrieve, either \"build\" or \"runtime\". Defaults to \"runtime\".")] = None, + app_id: Annotated[Optional[StrictStr], Field(description="(Optional) Filter on the provided app_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.")] = None, + service_id: Annotated[Optional[StrictStr], Field(description="(Optional) Filter on the provided service_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.")] = None, + deployment_id: Annotated[Optional[StrictStr], Field(description="(Optional) Filter on the provided deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.")] = None, + regional_deployment_id: Annotated[Optional[StrictStr], Field(description="(Optional) Filter on the provided regional_deployment_id. 
At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.")] = None, + instance_id: Annotated[Optional[StrictStr], Field(description="Deprecated, prefer using instance_ids instead.")] = None, + instance_ids: Annotated[Optional[List[StrictStr]], Field(description="(Optional) Filter on the provided instance_ids. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.")] = None, + stream: Annotated[Optional[StrictStr], Field(description="Deprecated, prefer using streams instead.")] = None, + streams: Annotated[Optional[List[StrictStr]], Field(description="(Optional) Filter on stream: either \"stdout\", \"stderr\" or \"koyeb\" (for system logs).")] = None, + start: Annotated[Optional[datetime], Field(description="(Optional) Defaults to 24 hours ago.")] = None, + limit: Annotated[Optional[StrictStr], Field(description="(Optional) Defaults to 1000. Maximum of 1000.")] = None, regex: Annotated[Optional[StrictStr], Field(description="(Optional) Apply a regex to filter logs. Can't be used with `text`.")] = None, text: Annotated[Optional[StrictStr], Field(description="(Optional) Looks for this string in logs. Can't be used with `regex`.")] = None, + regions: Annotated[Optional[List[StrictStr]], Field(description="(Optional) Filter on the provided regions (e.g. [\"fra\", \"was\"]).")] = None, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -795,28 +876,34 @@ def tail_logs_without_preload_content( """Tails logs - :param type: + :param type: Type of logs to retrieve, either \"build\" or \"runtime\". Defaults to \"runtime\". :type type: str - :param app_id: + :param app_id: (Optional) Filter on the provided app_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. :type app_id: str - :param service_id: + :param service_id: (Optional) Filter on the provided service_id. 
At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. :type service_id: str - :param deployment_id: + :param deployment_id: (Optional) Filter on the provided deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. :type deployment_id: str - :param regional_deployment_id: + :param regional_deployment_id: (Optional) Filter on the provided regional_deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. :type regional_deployment_id: str - :param instance_id: + :param instance_id: Deprecated, prefer using instance_ids instead. :type instance_id: str - :param stream: + :param instance_ids: (Optional) Filter on the provided instance_ids. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. + :type instance_ids: List[str] + :param stream: Deprecated, prefer using streams instead. :type stream: str - :param start: + :param streams: (Optional) Filter on stream: either \"stdout\", \"stderr\" or \"koyeb\" (for system logs). + :type streams: List[str] + :param start: (Optional) Defaults to 24 hours ago. :type start: datetime - :param limit: + :param limit: (Optional) Defaults to 1000. Maximum of 1000. :type limit: str :param regex: (Optional) Apply a regex to filter logs. Can't be used with `text`. :type regex: str :param text: (Optional) Looks for this string in logs. Can't be used with `regex`. :type text: str + :param regions: (Optional) Filter on the provided regions (e.g. [\"fra\", \"was\"]). + :type regions: List[str] :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -846,11 +933,14 @@ def tail_logs_without_preload_content( deployment_id=deployment_id, regional_deployment_id=regional_deployment_id, instance_id=instance_id, + instance_ids=instance_ids, stream=stream, + streams=streams, start=start, limit=limit, regex=regex, text=text, + regions=regions, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -881,11 +971,14 @@ def _tail_logs_serialize( deployment_id, regional_deployment_id, instance_id, + instance_ids, stream, + streams, start, limit, regex, text, + regions, _request_auth, _content_type, _headers, @@ -895,6 +988,9 @@ def _tail_logs_serialize( _host = None _collection_formats: Dict[str, str] = { + 'instance_ids': 'multi', + 'streams': 'multi', + 'regions': 'multi', } _path_params: Dict[str, str] = {} @@ -932,10 +1028,18 @@ def _tail_logs_serialize( _query_params.append(('instance_id', instance_id)) + if instance_ids is not None: + + _query_params.append(('instance_ids', instance_ids)) + if stream is not None: _query_params.append(('stream', stream)) + if streams is not None: + + _query_params.append(('streams', streams)) + if start is not None: if isinstance(start, datetime): _query_params.append( @@ -961,6 +1065,10 @@ def _tail_logs_serialize( _query_params.append(('text', text)) + if regions is not None: + + _query_params.append(('regions', regions)) + # process the header parameters # process the form parameters # process the body parameter diff --git a/koyeb/api/docs/DeploymentDefinitionType.md b/koyeb/api/docs/DeploymentDefinitionType.md index 0a2d7a71..5e50e67b 100644 --- a/koyeb/api/docs/DeploymentDefinitionType.md +++ b/koyeb/api/docs/DeploymentDefinitionType.md @@ -11,6 +11,8 @@ * `DATABASE` (value: `'DATABASE'`) +* `SANDBOX` (value: `'SANDBOX'`) + [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/koyeb/api/docs/LogsApi.md 
b/koyeb/api/docs/LogsApi.md index 18610da8..4e9d54ea 100644 --- a/koyeb/api/docs/LogsApi.md +++ b/koyeb/api/docs/LogsApi.md @@ -9,7 +9,7 @@ Method | HTTP request | Description # **query_logs** -> QueryLogsReply query_logs(type=type, app_id=app_id, service_id=service_id, deployment_id=deployment_id, instance_id=instance_id, stream=stream, regional_deployment_id=regional_deployment_id, start=start, end=end, order=order, limit=limit, regex=regex, text=text) +> QueryLogsReply query_logs(type=type, app_id=app_id, service_id=service_id, deployment_id=deployment_id, regional_deployment_id=regional_deployment_id, instance_id=instance_id, instance_ids=instance_ids, stream=stream, streams=streams, start=start, end=end, order=order, limit=limit, regex=regex, text=text, regions=regions) Query logs @@ -44,23 +44,26 @@ configuration.api_key['Bearer'] = os.environ["API_KEY"] with koyeb.api.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = koyeb.api.LogsApi(api_client) - type = 'type_example' # str | (optional) - app_id = 'app_id_example' # str | (optional) - service_id = 'service_id_example' # str | (optional) - deployment_id = 'deployment_id_example' # str | (optional) - instance_id = 'instance_id_example' # str | (optional) - stream = 'stream_example' # str | (optional) - regional_deployment_id = 'regional_deployment_id_example' # str | (optional) + type = 'type_example' # str | Type of logs to retrieve, either \"build\" or \"runtime\". Defaults to \"runtime\". (optional) + app_id = 'app_id_example' # str | (Optional) Filter on the provided app_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. (optional) + service_id = 'service_id_example' # str | (Optional) Filter on the provided service_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. 
(optional) + deployment_id = 'deployment_id_example' # str | (Optional) Filter on the provided deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. (optional) + regional_deployment_id = 'regional_deployment_id_example' # str | (Optional) Filter on the provided regional_deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. (optional) + instance_id = 'instance_id_example' # str | Deprecated, prefer using instance_ids instead. (optional) + instance_ids = ['instance_ids_example'] # List[str] | (Optional) Filter on the provided instance_ids. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. (optional) + stream = 'stream_example' # str | Deprecated, prefer using streams instead. (optional) + streams = ['streams_example'] # List[str] | (Optional) Filter on stream: either \"stdout\", \"stderr\" or \"koyeb\" (for system logs). (optional) start = '2013-10-20T19:20:30+01:00' # datetime | (Optional) Must always be before `end`. Defaults to 15 minutes ago. (optional) end = '2013-10-20T19:20:30+01:00' # datetime | (Optional) Must always be after `start`. Defaults to now. (optional) order = 'order_example' # str | (Optional) `asc` or `desc`. Defaults to `desc`. (optional) limit = 'limit_example' # str | (Optional) Defaults to 100. Maximum of 1000. (optional) regex = 'regex_example' # str | (Optional) Apply a regex to filter logs. Can't be used with `text`. (optional) text = 'text_example' # str | (Optional) Looks for this string in logs. Can't be used with `regex`. (optional) + regions = ['regions_example'] # List[str] | (Optional) Filter on the provided regions (e.g. [\"fra\", \"was\"]). 
(optional) try: # Query logs - api_response = api_instance.query_logs(type=type, app_id=app_id, service_id=service_id, deployment_id=deployment_id, instance_id=instance_id, stream=stream, regional_deployment_id=regional_deployment_id, start=start, end=end, order=order, limit=limit, regex=regex, text=text) + api_response = api_instance.query_logs(type=type, app_id=app_id, service_id=service_id, deployment_id=deployment_id, regional_deployment_id=regional_deployment_id, instance_id=instance_id, instance_ids=instance_ids, stream=stream, streams=streams, start=start, end=end, order=order, limit=limit, regex=regex, text=text, regions=regions) print("The response of LogsApi->query_logs:\n") pprint(api_response) except Exception as e: @@ -74,19 +77,22 @@ with koyeb.api.ApiClient(configuration) as api_client: Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **type** | **str**| | [optional] - **app_id** | **str**| | [optional] - **service_id** | **str**| | [optional] - **deployment_id** | **str**| | [optional] - **instance_id** | **str**| | [optional] - **stream** | **str**| | [optional] - **regional_deployment_id** | **str**| | [optional] + **type** | **str**| Type of logs to retrieve, either \"build\" or \"runtime\". Defaults to \"runtime\". | [optional] + **app_id** | **str**| (Optional) Filter on the provided app_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. | [optional] + **service_id** | **str**| (Optional) Filter on the provided service_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. | [optional] + **deployment_id** | **str**| (Optional) Filter on the provided deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. | [optional] + **regional_deployment_id** | **str**| (Optional) Filter on the provided regional_deployment_id. 
At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. | [optional] + **instance_id** | **str**| Deprecated, prefer using instance_ids instead. | [optional] + **instance_ids** | [**List[str]**](str.md)| (Optional) Filter on the provided instance_ids. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. | [optional] + **stream** | **str**| Deprecated, prefer using streams instead. | [optional] + **streams** | [**List[str]**](str.md)| (Optional) Filter on stream: either \"stdout\", \"stderr\" or \"koyeb\" (for system logs). | [optional] **start** | **datetime**| (Optional) Must always be before `end`. Defaults to 15 minutes ago. | [optional] **end** | **datetime**| (Optional) Must always be after `start`. Defaults to now. | [optional] **order** | **str**| (Optional) `asc` or `desc`. Defaults to `desc`. | [optional] **limit** | **str**| (Optional) Defaults to 100. Maximum of 1000. | [optional] **regex** | **str**| (Optional) Apply a regex to filter logs. Can't be used with `text`. | [optional] **text** | **str**| (Optional) Looks for this string in logs. Can't be used with `regex`. | [optional] + **regions** | [**List[str]**](str.md)| (Optional) Filter on the provided regions (e.g. [\"fra\", \"was\"]). 
| [optional] ### Return type @@ -117,7 +123,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **tail_logs** -> StreamResultOfLogEntry tail_logs(type=type, app_id=app_id, service_id=service_id, deployment_id=deployment_id, regional_deployment_id=regional_deployment_id, instance_id=instance_id, stream=stream, start=start, limit=limit, regex=regex, text=text) +> StreamResultOfLogEntry tail_logs(type=type, app_id=app_id, service_id=service_id, deployment_id=deployment_id, regional_deployment_id=regional_deployment_id, instance_id=instance_id, instance_ids=instance_ids, stream=stream, streams=streams, start=start, limit=limit, regex=regex, text=text, regions=regions) Tails logs @@ -152,21 +158,24 @@ configuration.api_key['Bearer'] = os.environ["API_KEY"] with koyeb.api.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = koyeb.api.LogsApi(api_client) - type = 'type_example' # str | (optional) - app_id = 'app_id_example' # str | (optional) - service_id = 'service_id_example' # str | (optional) - deployment_id = 'deployment_id_example' # str | (optional) - regional_deployment_id = 'regional_deployment_id_example' # str | (optional) - instance_id = 'instance_id_example' # str | (optional) - stream = 'stream_example' # str | (optional) - start = '2013-10-20T19:20:30+01:00' # datetime | (optional) - limit = 'limit_example' # str | (optional) + type = 'type_example' # str | Type of logs to retrieve, either \"build\" or \"runtime\". Defaults to \"runtime\". (optional) + app_id = 'app_id_example' # str | (Optional) Filter on the provided app_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. (optional) + service_id = 'service_id_example' # str | (Optional) Filter on the provided service_id. 
At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. (optional) + deployment_id = 'deployment_id_example' # str | (Optional) Filter on the provided deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. (optional) + regional_deployment_id = 'regional_deployment_id_example' # str | (Optional) Filter on the provided regional_deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. (optional) + instance_id = 'instance_id_example' # str | Deprecated, prefer using instance_ids instead. (optional) + instance_ids = ['instance_ids_example'] # List[str] | (Optional) Filter on the provided instance_ids. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. (optional) + stream = 'stream_example' # str | Deprecated, prefer using streams instead. (optional) + streams = ['streams_example'] # List[str] | (Optional) Filter on stream: either \"stdout\", \"stderr\" or \"koyeb\" (for system logs). (optional) + start = '2013-10-20T19:20:30+01:00' # datetime | (Optional) Defaults to 24 hours ago. (optional) + limit = 'limit_example' # str | (Optional) Defaults to 1000. Maximum of 1000. (optional) regex = 'regex_example' # str | (Optional) Apply a regex to filter logs. Can't be used with `text`. (optional) text = 'text_example' # str | (Optional) Looks for this string in logs. Can't be used with `regex`. (optional) + regions = ['regions_example'] # List[str] | (Optional) Filter on the provided regions (e.g. [\"fra\", \"was\"]). 
(optional) try: # Tails logs - api_response = api_instance.tail_logs(type=type, app_id=app_id, service_id=service_id, deployment_id=deployment_id, regional_deployment_id=regional_deployment_id, instance_id=instance_id, stream=stream, start=start, limit=limit, regex=regex, text=text) + api_response = api_instance.tail_logs(type=type, app_id=app_id, service_id=service_id, deployment_id=deployment_id, regional_deployment_id=regional_deployment_id, instance_id=instance_id, instance_ids=instance_ids, stream=stream, streams=streams, start=start, limit=limit, regex=regex, text=text, regions=regions) print("The response of LogsApi->tail_logs:\n") pprint(api_response) except Exception as e: @@ -180,17 +189,20 @@ with koyeb.api.ApiClient(configuration) as api_client: Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **type** | **str**| | [optional] - **app_id** | **str**| | [optional] - **service_id** | **str**| | [optional] - **deployment_id** | **str**| | [optional] - **regional_deployment_id** | **str**| | [optional] - **instance_id** | **str**| | [optional] - **stream** | **str**| | [optional] - **start** | **datetime**| | [optional] - **limit** | **str**| | [optional] + **type** | **str**| Type of logs to retrieve, either \"build\" or \"runtime\". Defaults to \"runtime\". | [optional] + **app_id** | **str**| (Optional) Filter on the provided app_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. | [optional] + **service_id** | **str**| (Optional) Filter on the provided service_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. | [optional] + **deployment_id** | **str**| (Optional) Filter on the provided deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. 
| [optional] + **regional_deployment_id** | **str**| (Optional) Filter on the provided regional_deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. | [optional] + **instance_id** | **str**| Deprecated, prefer using instance_ids instead. | [optional] + **instance_ids** | [**List[str]**](str.md)| (Optional) Filter on the provided instance_ids. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set. | [optional] + **stream** | **str**| Deprecated, prefer using streams instead. | [optional] + **streams** | [**List[str]**](str.md)| (Optional) Filter on stream: either \"stdout\", \"stderr\" or \"koyeb\" (for system logs). | [optional] + **start** | **datetime**| (Optional) Defaults to 24 hours ago. | [optional] + **limit** | **str**| (Optional) Defaults to 1000. Maximum of 1000. | [optional] **regex** | **str**| (Optional) Apply a regex to filter logs. Can't be used with `text`. | [optional] **text** | **str**| (Optional) Looks for this string in logs. Can't be used with `regex`. | [optional] + **regions** | [**List[str]**](str.md)| (Optional) Filter on the provided regions (e.g. [\"fra\", \"was\"]). 
| [optional] ### Return type diff --git a/koyeb/api/docs/Organization.md b/koyeb/api/docs/Organization.md index b11b379c..5ddaf0fe 100644 --- a/koyeb/api/docs/Organization.md +++ b/koyeb/api/docs/Organization.md @@ -7,6 +7,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **id** | **str** | | [optional] **external_id** | **str** | | [optional] +**provisioning** | **bool** | | [optional] **address1** | **str** | | [optional] **address2** | **str** | | [optional] **city** | **str** | | [optional] diff --git a/koyeb/api/docs/RegionalDeploymentDefinitionType.md b/koyeb/api/docs/RegionalDeploymentDefinitionType.md index fd038f8c..54dc149e 100644 --- a/koyeb/api/docs/RegionalDeploymentDefinitionType.md +++ b/koyeb/api/docs/RegionalDeploymentDefinitionType.md @@ -9,6 +9,8 @@ * `WORKER` (value: `'WORKER'`) +* `SANDBOX` (value: `'SANDBOX'`) + [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/koyeb/api/docs/ServiceType.md b/koyeb/api/docs/ServiceType.md index e6dd167c..688b1198 100644 --- a/koyeb/api/docs/ServiceType.md +++ b/koyeb/api/docs/ServiceType.md @@ -11,6 +11,8 @@ * `DATABASE` (value: `'DATABASE'`) +* `SANDBOX` (value: `'SANDBOX'`) + [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/koyeb/api/models/deployment_definition_type.py b/koyeb/api/models/deployment_definition_type.py index ab9668ba..4897cba4 100644 --- a/koyeb/api/models/deployment_definition_type.py +++ b/koyeb/api/models/deployment_definition_type.py @@ -30,6 +30,7 @@ class DeploymentDefinitionType(str, Enum): WEB = 'WEB' WORKER = 'WORKER' DATABASE = 'DATABASE' + SANDBOX = 'SANDBOX' @classmethod def from_json(cls, json_str: str) -> Self: diff --git a/koyeb/api/models/organization.py 
b/koyeb/api/models/organization.py index 2bf33861..48114d79 100644 --- a/koyeb/api/models/organization.py +++ b/koyeb/api/models/organization.py @@ -33,6 +33,7 @@ class Organization(BaseModel): """ # noqa: E501 id: Optional[StrictStr] = None external_id: Optional[StrictStr] = None + provisioning: Optional[StrictBool] = None address1: Optional[StrictStr] = None address2: Optional[StrictStr] = None city: Optional[StrictStr] = None @@ -61,7 +62,7 @@ class Organization(BaseModel): trial_starts_at: Optional[datetime] = None trial_ends_at: Optional[datetime] = None email_domain_allowlist: Optional[List[StrictStr]] = None - __properties: ClassVar[List[str]] = ["id", "external_id", "address1", "address2", "city", "postal_code", "state", "country", "company", "vat_number", "billing_name", "billing_email", "name", "plan", "plan_updated_at", "has_payment_method", "subscription_id", "current_subscription_id", "latest_subscription_id", "signup_qualification", "status", "status_message", "deactivation_reason", "verified", "qualifies_for_hobby23", "reprocess_after", "trialing", "trial_starts_at", "trial_ends_at", "email_domain_allowlist"] + __properties: ClassVar[List[str]] = ["id", "external_id", "provisioning", "address1", "address2", "city", "postal_code", "state", "country", "company", "vat_number", "billing_name", "billing_email", "name", "plan", "plan_updated_at", "has_payment_method", "subscription_id", "current_subscription_id", "latest_subscription_id", "signup_qualification", "status", "status_message", "deactivation_reason", "verified", "qualifies_for_hobby23", "reprocess_after", "trialing", "trial_starts_at", "trial_ends_at", "email_domain_allowlist"] model_config = ConfigDict( populate_by_name=True, @@ -116,6 +117,7 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: _obj = cls.model_validate({ "id": obj.get("id"), "external_id": obj.get("external_id"), + "provisioning": obj.get("provisioning"), "address1": obj.get("address1"), "address2": 
obj.get("address2"), "city": obj.get("city"), diff --git a/koyeb/api/models/regional_deployment_definition_type.py b/koyeb/api/models/regional_deployment_definition_type.py index c1624920..377ae05d 100644 --- a/koyeb/api/models/regional_deployment_definition_type.py +++ b/koyeb/api/models/regional_deployment_definition_type.py @@ -29,6 +29,7 @@ class RegionalDeploymentDefinitionType(str, Enum): INVALID = 'INVALID' WEB = 'WEB' WORKER = 'WORKER' + SANDBOX = 'SANDBOX' @classmethod def from_json(cls, json_str: str) -> Self: diff --git a/koyeb/api/models/service_type.py b/koyeb/api/models/service_type.py index cade14be..684e867e 100644 --- a/koyeb/api/models/service_type.py +++ b/koyeb/api/models/service_type.py @@ -30,6 +30,7 @@ class ServiceType(str, Enum): WEB = 'WEB' WORKER = 'WORKER' DATABASE = 'DATABASE' + SANDBOX = 'SANDBOX' @classmethod def from_json(cls, json_str: str) -> Self: diff --git a/spec/openapi.json b/spec/openapi.json index a20d2265..1f0f43c4 100644 --- a/spec/openapi.json +++ b/spec/openapi.json @@ -3900,7 +3900,8 @@ "INVALID_TYPE", "WEB", "WORKER", - "DATABASE" + "DATABASE", + "SANDBOX" ] }, "collectionFormat": "multi" @@ -6285,45 +6286,74 @@ "parameters": [ { "name": "type", + "description": "Type of logs to retrieve, either \"build\" or \"runtime\". Defaults to \"runtime\".", "in": "query", "required": false, "type": "string" }, { "name": "app_id", + "description": "(Optional) Filter on the provided app_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.", "in": "query", "required": false, "type": "string" }, { "name": "service_id", + "description": "(Optional) Filter on the provided service_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.", "in": "query", "required": false, "type": "string" }, { "name": "deployment_id", + "description": "(Optional) Filter on the provided deployment_id. 
At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "regional_deployment_id", + "description": "(Optional) Filter on the provided regional_deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.", "in": "query", "required": false, "type": "string" }, { "name": "instance_id", + "description": "Deprecated, prefer using instance_ids instead.", "in": "query", "required": false, "type": "string" }, + { + "name": "instance_ids", + "description": "(Optional) Filter on the provided instance_ids. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.", + "in": "query", + "required": false, + "type": "array", + "items": { + "type": "string" + }, + "collectionFormat": "multi" + }, { "name": "stream", + "description": "Deprecated, prefer using streams instead.", "in": "query", "required": false, "type": "string" }, { - "name": "regional_deployment_id", + "name": "streams", + "description": "(Optional) Filter on stream: either \"stdout\", \"stderr\" or \"koyeb\" (for system logs).", "in": "query", "required": false, - "type": "string" + "type": "array", + "items": { + "type": "string" + }, + "collectionFormat": "multi" }, { "name": "start", @@ -6369,6 +6399,17 @@ "in": "query", "required": false, "type": "string" + }, + { + "name": "regions", + "description": "(Optional) Filter on the provided regions (e.g. [\"fra\", \"was\"]).", + "in": "query", + "required": false, + "type": "array", + "items": { + "type": "string" + }, + "collectionFormat": "multi" } ], "tags": [ @@ -6442,48 +6483,78 @@ "parameters": [ { "name": "type", + "description": "Type of logs to retrieve, either \"build\" or \"runtime\". 
Defaults to \"runtime\".", "in": "query", "required": false, "type": "string" }, { "name": "app_id", + "description": "(Optional) Filter on the provided app_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.", "in": "query", "required": false, "type": "string" }, { "name": "service_id", + "description": "(Optional) Filter on the provided service_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.", "in": "query", "required": false, "type": "string" }, { "name": "deployment_id", + "description": "(Optional) Filter on the provided deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.", "in": "query", "required": false, "type": "string" }, { "name": "regional_deployment_id", + "description": "(Optional) Filter on the provided regional_deployment_id. At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.", "in": "query", "required": false, "type": "string" }, { "name": "instance_id", + "description": "Deprecated, prefer using instance_ids instead.", "in": "query", "required": false, "type": "string" }, + { + "name": "instance_ids", + "description": "(Optional) Filter on the provided instance_ids. 
At least one of app_id, service_id, deployment_id, regional_deployment_id or instance_ids must be set.", + "in": "query", + "required": false, + "type": "array", + "items": { + "type": "string" + }, + "collectionFormat": "multi" + }, { "name": "stream", + "description": "Deprecated, prefer using streams instead.", "in": "query", "required": false, "type": "string" }, + { + "name": "streams", + "description": "(Optional) Filter on stream: either \"stdout\", \"stderr\" or \"koyeb\" (for system logs).", + "in": "query", + "required": false, + "type": "array", + "items": { + "type": "string" + }, + "collectionFormat": "multi" + }, { "name": "start", + "description": "(Optional) Defaults to 24 hours ago.", "in": "query", "required": false, "type": "string", @@ -6491,6 +6562,7 @@ }, { "name": "limit", + "description": "(Optional) Defaults to 1000. Maximum of 1000.", "in": "query", "required": false, "type": "string", @@ -6509,6 +6581,17 @@ "in": "query", "required": false, "type": "string" + }, + { + "name": "regions", + "description": "(Optional) Filter on the provided regions (e.g. 
[\"fra\", \"was\"]).", + "in": "query", + "required": false, + "type": "array", + "items": { + "type": "string" + }, + "collectionFormat": "multi" } ], "tags": [ @@ -13385,7 +13468,8 @@ "enum": [ "INVALID", "WEB", - "WORKER" + "WORKER", + "SANDBOX" ], "default": "INVALID" }, @@ -13766,7 +13850,8 @@ "INVALID", "WEB", "WORKER", - "DATABASE" + "DATABASE", + "SANDBOX" ], "default": "INVALID" }, @@ -14157,7 +14242,8 @@ "INVALID_TYPE", "WEB", "WORKER", - "DATABASE" + "DATABASE", + "SANDBOX" ], "default": "INVALID_TYPE" }, @@ -18906,6 +18992,9 @@ "external_id": { "type": "string" }, + "provisioning": { + "type": "boolean" + }, "address1": { "type": "string" }, From d5417646a2019fc04284afc74cee3812e7568764 Mon Sep 17 00:00:00 2001 From: Bastien Chatelard Date: Wed, 5 Nov 2025 14:13:04 +0100 Subject: [PATCH 41/47] Update sandbox.md --- docs/sandbox.md | 1639 ++++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 1461 insertions(+), 178 deletions(-) diff --git a/docs/sandbox.md b/docs/sandbox.md index 39719f2e..b2f63cce 100644 --- a/docs/sandbox.md +++ b/docs/sandbox.md @@ -9,7 +9,7 @@ Koyeb Sandbox - Interactive execution environment for running arbitrary code on # koyeb/sandbox.exec Command execution utilities for Koyeb Sandbox instances -Using WebSocket connection to Koyeb API +Using SandboxClient HTTP API @@ -98,7 +98,7 @@ Execute a command in a shell synchronously. Supports streaming output via callba - `command` - Command to execute as a string (e.g., "python -c 'print(2+2)'") - `cwd` - Working directory for the command - `env` - Environment variables for the command -- `timeout` - Command timeout in seconds +- `timeout` - Command timeout in seconds (enforced for HTTP requests) - `on_stdout` - Optional callback for streaming stdout chunks - `on_stderr` - Optional callback for streaming stderr chunks @@ -156,7 +156,7 @@ Execute a command in a shell asynchronously. 
Supports streaming output via callb - `command` - Command to execute as a string (e.g., "python -c 'print(2+2)'") - `cwd` - Working directory for the command - `env` - Environment variables for the command -- `timeout` - Command timeout in seconds +- `timeout` - Command timeout in seconds (enforced for HTTP requests) - `on_stdout` - Optional callback for streaming stdout chunks - `on_stderr` - Optional callback for streaming stderr chunks @@ -185,7 +185,7 @@ Execute a command in a shell asynchronously. Supports streaming output via callb # koyeb/sandbox.filesystem Filesystem operations for Koyeb Sandbox instances -Using only the primitives available in the Koyeb API +Using SandboxClient HTTP API @@ -197,22 +197,22 @@ class SandboxFilesystemError(SandboxError) Base exception for filesystem operations - + -## FileNotFoundError Objects +## SandboxFileNotFoundError Objects ```python -class FileNotFoundError(SandboxFilesystemError) +class SandboxFileNotFoundError(SandboxFilesystemError) ``` Raised when file or directory not found - + -## FileExistsError Objects +## SandboxFileExistsError Objects ```python -class FileExistsError(SandboxFilesystemError) +class SandboxFileExistsError(SandboxFilesystemError) ``` Raised when file already exists @@ -237,7 +237,7 @@ class SandboxFilesystem() ``` Synchronous filesystem operations for Koyeb Sandbox instances. -Using only the primitives available in the Koyeb API. +Using SandboxClient HTTP API. For async usage, use AsyncSandboxFilesystem instead. @@ -292,7 +292,7 @@ Create a directory synchronously. 
**Arguments**: - `path` - Absolute path to the directory -- `recursive` - Create parent directories if needed (default: False) +- `recursive` - Create parent directories if needed (default: False, not used - API always creates parents) @@ -420,7 +420,9 @@ Check if path is a directory synchronously #### upload\_file ```python -def upload_file(local_path: str, remote_path: str) -> None +def upload_file(local_path: str, + remote_path: str, + encoding: str = "utf-8") -> None ``` Upload a local file to the sandbox synchronously. @@ -429,13 +431,22 @@ Upload a local file to the sandbox synchronously. - `local_path` - Path to the local file - `remote_path` - Destination path in the sandbox +- `encoding` - File encoding (default: "utf-8"). Use "base64" for binary files. + + +**Raises**: + +- `SandboxFileNotFoundError` - If local file doesn't exist +- `UnicodeDecodeError` - If file cannot be decoded with specified encoding #### download\_file ```python -def download_file(remote_path: str, local_path: str) -> None +def download_file(remote_path: str, + local_path: str, + encoding: str = "utf-8") -> None ``` Download a file from the sandbox to a local path synchronously. @@ -444,6 +455,12 @@ Download a file from the sandbox to a local path synchronously. - `remote_path` - Path to the file in the sandbox - `local_path` - Destination path on the local filesystem +- `encoding` - File encoding (default: "utf-8"). Use "base64" for binary files. + + +**Raises**: + +- `SandboxFileNotFoundError` - If remote file doesn't exist @@ -484,7 +501,7 @@ Remove file or directory synchronously. #### open ```python -def open(path: str, mode: str = "r") -> "SandboxFileIO" +def open(path: str, mode: str = "r") -> SandboxFileIO ``` Open a file in the sandbox synchronously. @@ -515,6 +532,7 @@ Inherits from SandboxFilesystem and provides async methods. 
#### write\_file ```python +@async_wrapper("write_file") async def write_file(path: str, content: Union[str, bytes], encoding: str = "utf-8") -> None @@ -533,6 +551,7 @@ Write content to a file asynchronously. #### read\_file ```python +@async_wrapper("read_file") async def read_file(path: str, encoding: str = "utf-8") -> FileInfo ``` @@ -553,6 +572,7 @@ Read a file from the sandbox asynchronously. #### mkdir ```python +@async_wrapper("mkdir") async def mkdir(path: str, recursive: bool = False) -> None ``` @@ -561,13 +581,14 @@ Create a directory asynchronously. **Arguments**: - `path` - Absolute path to the directory -- `recursive` - Create parent directories if needed (default: False) +- `recursive` - Create parent directories if needed (default: False, not used - API always creates parents) #### list\_dir ```python +@async_wrapper("list_dir") async def list_dir(path: str = ".") -> List[str] ``` @@ -587,6 +608,7 @@ List contents of a directory asynchronously. #### delete\_file ```python +@async_wrapper("delete_file") async def delete_file(path: str) -> None ``` @@ -601,6 +623,7 @@ Delete a file asynchronously. #### delete\_dir ```python +@async_wrapper("delete_dir") async def delete_dir(path: str) -> None ``` @@ -615,6 +638,7 @@ Delete a directory asynchronously. #### rename\_file ```python +@async_wrapper("rename_file") async def rename_file(old_path: str, new_path: str) -> None ``` @@ -630,6 +654,7 @@ Rename a file asynchronously. #### move\_file ```python +@async_wrapper("move_file") async def move_file(source_path: str, destination_path: str) -> None ``` @@ -659,6 +684,7 @@ Write multiple files in a single operation asynchronously. 
#### exists ```python +@async_wrapper("exists") async def exists(path: str) -> bool ``` @@ -669,6 +695,7 @@ Check if file/directory exists asynchronously #### is\_file ```python +@async_wrapper("is_file") async def is_file(path: str) -> bool ``` @@ -679,6 +706,7 @@ Check if path is a file asynchronously #### is\_dir ```python +@async_wrapper("is_dir") async def is_dir(path: str) -> bool ``` @@ -689,7 +717,10 @@ Check if path is a directory asynchronously #### upload\_file ```python -async def upload_file(local_path: str, remote_path: str) -> None +@async_wrapper("upload_file") +async def upload_file(local_path: str, + remote_path: str, + encoding: str = "utf-8") -> None ``` Upload a local file to the sandbox asynchronously. @@ -698,13 +729,17 @@ Upload a local file to the sandbox asynchronously. - `local_path` - Path to the local file - `remote_path` - Destination path in the sandbox +- `encoding` - File encoding (default: "utf-8"). Use "base64" for binary files. #### download\_file ```python -async def download_file(remote_path: str, local_path: str) -> None +@async_wrapper("download_file") +async def download_file(remote_path: str, + local_path: str, + encoding: str = "utf-8") -> None ``` Download a file from the sandbox to a local path asynchronously. @@ -713,6 +748,7 @@ Download a file from the sandbox to a local path asynchronously. - `remote_path` - Path to the file in the sandbox - `local_path` - Destination path on the local filesystem +- `encoding` - File encoding (default: "utf-8"). Use "base64" for binary files. @@ -738,6 +774,7 @@ List directory contents asynchronously. #### rm ```python +@async_wrapper("rm") async def rm(path: str, recursive: bool = False) -> None ``` @@ -753,7 +790,7 @@ Remove file or directory asynchronously. #### open ```python -def open(path: str, mode: str = "r") -> "AsyncSandboxFileIO" +def open(path: str, mode: str = "r") -> AsyncSandboxFileIO ``` Open a file in the sandbox asynchronously. 
@@ -854,6 +891,64 @@ Close the file Koyeb Sandbox - Python SDK for creating and managing Koyeb sandboxes + + +## ProcessInfo Objects + +```python +@dataclass +class ProcessInfo() +``` + +Type definition for process information returned by list_processes. + + + +#### id + +Process ID (UUID string) + + + +#### command + +The command that was executed + + + +#### status + +Process status (e.g., "running", "completed") + + + +#### pid + +OS process ID (if running) + + + +#### exit\_code + +Exit code (if completed) + + + +#### started\_at + +ISO 8601 timestamp when process started + + + +## ExposedPort Objects + +```python +@dataclass +class ExposedPort() +``` + +Result of exposing a port via TCP proxy. + ## Sandbox Objects @@ -865,6 +960,17 @@ class Sandbox() Synchronous sandbox for running code on Koyeb infrastructure. Provides creation and deletion functionality with proper health polling. + + +#### id + +```python +@property +def id() -> str +``` + +Get the service ID of the sandbox. + #### create @@ -872,42 +978,80 @@ Provides creation and deletion functionality with proper health polling. ```python @classmethod def create(cls, - image: str = "docker.io/library/ubuntu:latest", + image: str = "koyeb/sandbox", name: str = "quick-sandbox", wait_ready: bool = True, instance_type: str = "nano", - ports: Optional[List[DeploymentPort]] = None, + exposed_port_protocol: Optional[str] = None, env: Optional[Dict[str, str]] = None, regions: Optional[List[str]] = None, api_token: Optional[str] = None, - timeout: int = 300) -> "Sandbox" + timeout: int = 300, + idle_timeout: Optional[IdleTimeout] = None, + enable_tcp_proxy: bool = False) -> Sandbox ``` Create a new sandbox instance. 
**Arguments**: -- `image` - Docker image to use (default: ubuntu:latest) +- `image` - Docker image to use (default: koyeb/sandbox) - `name` - Name of the sandbox - `wait_ready` - Wait for sandbox to be ready (default: True) - `instance_type` - Instance type (default: nano) -- `ports` - List of ports to expose +- `exposed_port_protocol` - Protocol to expose ports with ("http" or "http2"). + If None, defaults to "http". + If provided, must be one of "http" or "http2". - `env` - Environment variables - `regions` - List of regions to deploy to (default: ["na"]) - `api_token` - Koyeb API token (if None, will try to get from KOYEB_API_TOKEN env var) - `timeout` - Timeout for sandbox creation in seconds +- `idle_timeout` - Idle timeout configuration for scale-to-zero + - None: Auto-enable (light_sleep=300s, deep_sleep=600s) + - 0: Disable scale-to-zero (keep always-on) + - int > 0: Deep sleep only (e.g., 600 for 600s deep sleep) + - dict: Explicit configuration with {"light_sleep": 300, "deep_sleep": 600} +- `enable_tcp_proxy` - If True, enables TCP proxy for direct TCP access to port 3031 **Returns**: - `Sandbox` - A new Sandbox instance + + +#### get\_from\_id + +```python +@classmethod +def get_from_id(cls, id: str, api_token: Optional[str] = None) -> "Sandbox" +``` + +Get a sandbox by service ID. + +**Arguments**: + +- `id` - Service ID of the sandbox +- `api_token` - Koyeb API token (if None, will try to get from KOYEB_API_TOKEN env var) + + +**Returns**: + +- `Sandbox` - The Sandbox instance + + +**Raises**: + +- `ValueError` - If API token is not provided or id is invalid +- `SandboxError` - If sandbox is not found or retrieval fails + #### wait\_ready ```python -def wait_ready(timeout: int = 60, poll_interval: float = 2.0) -> bool +def wait_ready(timeout: int = DEFAULT_INSTANCE_WAIT_TIMEOUT, + poll_interval: float = DEFAULT_POLL_INTERVAL) -> bool ``` Wait for sandbox to become ready with proper polling. 
@@ -922,6 +1066,31 @@ Wait for sandbox to become ready with proper polling. - `bool` - True if sandbox became ready, False if timeout + + +#### wait\_tcp\_proxy\_ready + +```python +def wait_tcp_proxy_ready(timeout: int = DEFAULT_INSTANCE_WAIT_TIMEOUT, + poll_interval: float = DEFAULT_POLL_INTERVAL) -> bool +``` + +Wait for TCP proxy to become ready and available. + +Polls the deployment metadata until the TCP proxy information is available. +This is useful when enable_tcp_proxy=True was set during sandbox creation, +as the proxy information may not be immediately available. + +**Arguments**: + +- `timeout` - Maximum time to wait in seconds +- `poll_interval` - Time between checks in seconds + + +**Returns**: + +- `bool` - True if TCP proxy became ready, False if timeout + #### delete @@ -932,6 +1101,40 @@ def delete() -> None Delete the sandbox instance. + + +#### get\_domain + +```python +def get_domain() -> Optional[str] +``` + +Get the public domain of the sandbox. + +Returns the domain name (e.g., "app-name-org.koyeb.app") without protocol or path. +To construct the URL, use: f"https://{sandbox.get_domain()}" + +**Returns**: + +- `Optional[str]` - The domain name or None if unavailable + + + +#### get\_tcp\_proxy\_info + +```python +def get_tcp_proxy_info() -> Optional[tuple[str, int]] +``` + +Get the TCP proxy host and port for the sandbox. + +Returns the TCP proxy host and port as a tuple (host, port) for direct TCP access to port 3031. +This is only available if enable_tcp_proxy=True was set when creating the sandbox. 
+ +**Returns**: + + Optional[tuple[str, int]]: A tuple of (host, port) or None if unavailable + #### status @@ -958,7 +1161,7 @@ Check if sandbox is healthy and ready for operations ```python @property -def filesystem() +def filesystem() -> "SandboxFilesystem" ``` Get filesystem operations interface @@ -969,276 +1172,1356 @@ Get filesystem operations interface ```python @property -def exec() +def exec() -> "SandboxExecutor" ``` Get command execution interface - + -## AsyncSandbox Objects +#### expose\_port ```python -class AsyncSandbox(Sandbox) +def expose_port(port: int) -> ExposedPort ``` -Async sandbox for running code on Koyeb infrastructure. -Inherits from Sandbox and provides async wrappers for all operations. - - - -#### create - -```python -@classmethod -async def create(cls, - image: str = "docker.io/library/ubuntu:latest", - name: str = "quick-sandbox", - wait_ready: bool = True, - instance_type: str = "nano", - ports: Optional[List[DeploymentPort]] = None, - env: Optional[Dict[str, str]] = None, - regions: Optional[List[str]] = None, - api_token: Optional[str] = None, - timeout: int = 300) -> "AsyncSandbox" -``` +Expose a port to external connections via TCP proxy. -Create a new sandbox instance with async support. +Binds the specified internal port to the TCP proxy, allowing external +connections to reach services running on that port inside the sandbox. +Automatically unbinds any existing port before binding the new one. 
**Arguments**: -- `image` - Docker image to use (default: ubuntu:latest) -- `name` - Name of the sandbox -- `wait_ready` - Wait for sandbox to be ready (default: True) -- `instance_type` - Instance type (default: nano) -- `ports` - List of ports to expose -- `env` - Environment variables -- `regions` - List of regions to deploy to (default: ["na"]) -- `api_token` - Koyeb API token (if None, will try to get from KOYEB_API_TOKEN env var) -- `timeout` - Timeout for sandbox creation in seconds +- `port` - The internal port number to expose (must be a valid port number between 1 and 65535) **Returns**: -- `AsyncSandbox` - A new AsyncSandbox instance - - - -#### wait\_ready +- `ExposedPort` - An object with `port` and `exposed_at` attributes: + - port: The exposed port number + - exposed_at: The full URL with https:// protocol (e.g., "https://app-name-org.koyeb.app") + -```python -async def wait_ready(timeout: int = 60, poll_interval: float = 2.0) -> bool -``` +**Raises**: -Wait for sandbox to become ready with proper async polling. +- `ValueError` - If port is not in valid range [1, 65535] +- `SandboxError` - If the port binding operation fails + -**Arguments**: +**Notes**: -- `timeout` - Maximum time to wait in seconds -- `poll_interval` - Time between health checks in seconds + - Only one port can be exposed at a time + - Any existing port binding is automatically unbound before binding the new port + - The port must be available and accessible within the sandbox environment + - The TCP proxy is accessed via get_tcp_proxy_info() which returns (host, port) -**Returns**: +**Example**: -- `bool` - True if sandbox became ready, False if timeout + >>> result = sandbox.expose_port(8080) + >>> result.port + 8080 + >>> result.exposed_at + 'https://app-name-org.koyeb.app' - + -#### delete +#### unexpose\_port ```python -async def delete() -> None +def unexpose_port() -> None ``` -Delete the sandbox instance asynchronously. +Unexpose a port from external connections. 
- +Removes the TCP proxy port binding, stopping traffic forwarding to the +previously bound port. -#### status +**Raises**: -```python -async def status() -> str -``` +- `SandboxError` - If the port unbinding operation fails + -Get current sandbox status asynchronously +**Notes**: - + - After unexposing, the TCP proxy will no longer forward traffic + - Safe to call even if no port is currently bound -#### is\_healthy + + +#### launch\_process ```python -async def is_healthy() -> bool +def launch_process(cmd: str, + cwd: Optional[str] = None, + env: Optional[Dict[str, str]] = None) -> str ``` -Check if sandbox is healthy and ready for operations asynchronously - - - -#### exec +Launch a background process in the sandbox. -```python -@property -def exec() -``` +Starts a long-running background process that continues executing even after +the method returns. Use this for servers, workers, or other long-running tasks. -Get async command execution interface +**Arguments**: - +- `cmd` - The shell command to execute as a background process +- `cwd` - Optional working directory for the process +- `env` - Optional environment variables to set/override for the process + -#### filesystem +**Returns**: -```python -@property -def filesystem() -``` +- `str` - The unique process ID (UUID string) that can be used to manage the process + -Get filesystem operations interface +**Raises**: - +- `SandboxError` - If the process launch fails + -# koyeb/sandbox.utils +**Example**: -Utility functions for Koyeb Sandbox + >>> process_id = sandbox.launch_process("python -u server.py") + >>> print(f"Started process: {process_id}") - + -#### get\_api\_client +#### kill\_process ```python -def get_api_client( - api_token: Optional[str] = None, - host: Optional[str] = None -) -> tuple[AppsApi, ServicesApi, InstancesApi] +def kill_process(process_id: str) -> None ``` -Get configured API clients for Koyeb operations. +Kill a background process by its ID. + +Terminates a running background process. 
This sends a SIGTERM signal to the process, +allowing it to clean up gracefully. If the process doesn't terminate within a timeout, +it will be forcefully killed with SIGKILL. **Arguments**: -- `api_token` - Koyeb API token. If not provided, will try to get from KOYEB_API_TOKEN env var -- `host` - Koyeb API host URL. If not provided, will try to get from KOYEB_API_HOST env var (defaults to https://app.koyeb.com) +- `process_id` - The unique process ID (UUID string) to kill -**Returns**: +**Raises**: - Tuple of (AppsApi, ServicesApi, InstancesApi) instances +- `SandboxError` - If the process kill operation fails -**Raises**: +**Example**: -- `ValueError` - If API token is not provided + >>> sandbox.kill_process("550e8400-e29b-41d4-a716-446655440000") - + -#### build\_env\_vars +#### list\_processes ```python -def build_env_vars(env: Optional[Dict[str, str]]) -> List[DeploymentEnv] +def list_processes() -> List[ProcessInfo] ``` -Build environment variables list from dictionary. +List all background processes. -**Arguments**: +Returns information about all currently running and recently completed background +processes. This includes both active processes and processes that have completed +(which remain in memory until server restart). -- `env` - Dictionary of environment variables +**Returns**: + +- `List[ProcessInfo]` - List of process objects, each containing: + - id: Process ID (UUID string) + - command: The command that was executed + - status: Process status (e.g., "running", "completed") + - pid: OS process ID (if running) + - exit_code: Exit code (if completed) + - started_at: ISO 8601 timestamp when process started + - completed_at: ISO 8601 timestamp when process completed (if applicable) -**Returns**: +**Raises**: - List of DeploymentEnv objects +- `SandboxError` - If listing processes fails + - +**Example**: -#### create\_docker\_source + >>> processes = sandbox.list_processes() + >>> for process in processes: + ... 
print(f"{process.id}: {process.command} - {process.status}") + + + +#### kill\_all\_processes ```python -def create_docker_source(image: str, command_args: List[str]) -> DockerSource +def kill_all_processes() -> int ``` -Create Docker source configuration. +Kill all running background processes. -**Arguments**: +Convenience method that lists all processes and kills them all. This is useful +for cleanup operations. -- `image` - Docker image name -- `command_args` - Command and arguments to run +**Returns**: + +- `int` - The number of processes that were killed -**Returns**: +**Raises**: - DockerSource object +- `SandboxError` - If listing or killing processes fails + - +**Example**: -#### create\_deployment\_definition + >>> count = sandbox.kill_all_processes() + >>> print(f"Killed {count} processes") + + + +#### \_\_enter\_\_ ```python -def create_deployment_definition( - name: str, - docker_source: DockerSource, - env_vars: List[DeploymentEnv], - instance_type: str, - ports: Optional[List[DeploymentPort]] = None, - regions: List[str] = None) -> DeploymentDefinition +def __enter__() -> "Sandbox" ``` -Create deployment definition for a sandbox service. +Context manager entry - returns self. -**Arguments**: + -- `name` - Service name -- `docker_source` - Docker configuration -- `env_vars` - Environment variables -- `instance_type` - Instance type -- `ports` - List of ports (if provided, type becomes WEB, otherwise WORKER) -- `regions` - List of regions (defaults to North America) - +#### \_\_exit\_\_ -**Returns**: +```python +def __exit__(exc_type, exc_val, exc_tb) -> None +``` - DeploymentDefinition object +Context manager exit - automatically deletes the sandbox. - + -#### get\_sandbox\_status +## AsyncSandbox Objects ```python -def get_sandbox_status(instance_id: str, - api_token: Optional[str] = None) -> InstanceStatus +class AsyncSandbox(Sandbox) ``` -Get the current status of a sandbox instance. +Async sandbox for running code on Koyeb infrastructure. 
+Inherits from Sandbox and provides async wrappers for all operations. + + + +#### get\_from\_id + +```python +@classmethod +async def get_from_id(cls, + id: str, + api_token: Optional[str] = None) -> "AsyncSandbox" +``` + +Get a sandbox by service ID asynchronously. + +**Arguments**: + +- `id` - Service ID of the sandbox +- `api_token` - Koyeb API token (if None, will try to get from KOYEB_API_TOKEN env var) + + +**Returns**: + +- `AsyncSandbox` - The AsyncSandbox instance + + +**Raises**: + +- `ValueError` - If API token is not provided or id is invalid +- `SandboxError` - If sandbox is not found or retrieval fails + + + +#### create + +```python +@classmethod +async def create(cls, + image: str = "koyeb/sandbox", + name: str = "quick-sandbox", + wait_ready: bool = True, + instance_type: str = "nano", + exposed_port_protocol: Optional[str] = None, + env: Optional[Dict[str, str]] = None, + regions: Optional[List[str]] = None, + api_token: Optional[str] = None, + timeout: int = 300, + idle_timeout: Optional[IdleTimeout] = None, + enable_tcp_proxy: bool = False) -> AsyncSandbox +``` + +Create a new sandbox instance with async support. + +**Arguments**: + +- `image` - Docker image to use (default: koyeb/sandbox) +- `name` - Name of the sandbox +- `wait_ready` - Wait for sandbox to be ready (default: True) +- `instance_type` - Instance type (default: nano) +- `exposed_port_protocol` - Protocol to expose ports with ("http" or "http2"). + If None, defaults to "http". + If provided, must be one of "http" or "http2". 
+- `env` - Environment variables +- `regions` - List of regions to deploy to (default: ["na"]) +- `api_token` - Koyeb API token (if None, will try to get from KOYEB_API_TOKEN env var) +- `timeout` - Timeout for sandbox creation in seconds +- `idle_timeout` - Idle timeout configuration for scale-to-zero + - None: Auto-enable (light_sleep=300s, deep_sleep=600s) + - 0: Disable scale-to-zero (keep always-on) + - int > 0: Deep sleep only (e.g., 600 for 600s deep sleep) + - dict: Explicit configuration with {"light_sleep": 300, "deep_sleep": 600} +- `enable_tcp_proxy` - If True, enables TCP proxy for direct TCP access to port 3031 + + +**Returns**: + +- `AsyncSandbox` - A new AsyncSandbox instance + + + +#### wait\_ready + +```python +async def wait_ready(timeout: int = DEFAULT_INSTANCE_WAIT_TIMEOUT, + poll_interval: float = DEFAULT_POLL_INTERVAL) -> bool +``` + +Wait for sandbox to become ready with proper async polling. + +**Arguments**: + +- `timeout` - Maximum time to wait in seconds +- `poll_interval` - Time between health checks in seconds + + +**Returns**: + +- `bool` - True if sandbox became ready, False if timeout + + + +#### wait\_tcp\_proxy\_ready + +```python +async def wait_tcp_proxy_ready( + timeout: int = DEFAULT_INSTANCE_WAIT_TIMEOUT, + poll_interval: float = DEFAULT_POLL_INTERVAL) -> bool +``` + +Wait for TCP proxy to become ready and available asynchronously. + +Polls the deployment metadata until the TCP proxy information is available. +This is useful when enable_tcp_proxy=True was set during sandbox creation, +as the proxy information may not be immediately available. + +**Arguments**: + +- `timeout` - Maximum time to wait in seconds +- `poll_interval` - Time between checks in seconds + + +**Returns**: + +- `bool` - True if TCP proxy became ready, False if timeout + + + +#### delete + +```python +@async_wrapper("delete") +async def delete() -> None +``` + +Delete the sandbox instance asynchronously. 
+ + + +#### status + +```python +@async_wrapper("status") +async def status() -> str +``` + +Get current sandbox status asynchronously + + + +#### is\_healthy + +```python +@async_wrapper("is_healthy") +async def is_healthy() -> bool +``` + +Check if sandbox is healthy and ready for operations asynchronously + + + +#### exec + +```python +@property +def exec() -> "AsyncSandboxExecutor" +``` + +Get async command execution interface + + + +#### filesystem + +```python +@property +def filesystem() -> "AsyncSandboxFilesystem" +``` + +Get filesystem operations interface + + + +#### expose\_port + +```python +@async_wrapper("expose_port") +async def expose_port(port: int) -> ExposedPort +``` + +Expose a port to external connections via TCP proxy asynchronously. + + + +#### unexpose\_port + +```python +@async_wrapper("unexpose_port") +async def unexpose_port() -> None +``` + +Unexpose a port from external connections asynchronously. + + + +#### launch\_process + +```python +@async_wrapper("launch_process") +async def launch_process(cmd: str, + cwd: Optional[str] = None, + env: Optional[Dict[str, str]] = None) -> str +``` + +Launch a background process in the sandbox asynchronously. + + + +#### kill\_process + +```python +@async_wrapper("kill_process") +async def kill_process(process_id: str) -> None +``` + +Kill a background process by its ID asynchronously. + + + +#### list\_processes + +```python +@async_wrapper("list_processes") +async def list_processes() -> List[ProcessInfo] +``` + +List all background processes asynchronously. + + + +#### kill\_all\_processes + +```python +async def kill_all_processes() -> int +``` + +Kill all running background processes asynchronously. + + + +#### \_\_aenter\_\_ + +```python +async def __aenter__() -> "AsyncSandbox" +``` + +Async context manager entry - returns self. + + + +#### \_\_aexit\_\_ + +```python +async def __aexit__(exc_type, exc_val, exc_tb) -> None +``` + +Async context manager exit - automatically deletes the sandbox. 
+ + + +# koyeb/sandbox.utils + +Utility functions for Koyeb Sandbox + + + +#### DEFAULT\_INSTANCE\_WAIT\_TIMEOUT + +seconds + + + +#### DEFAULT\_POLL\_INTERVAL + +seconds + + + +#### DEFAULT\_COMMAND\_TIMEOUT + +seconds + + + +#### DEFAULT\_HTTP\_TIMEOUT + +seconds for HTTP requests + + + +## IdleTimeoutConfig Objects + +```python +class IdleTimeoutConfig(TypedDict) +``` + +Configuration for idle timeout with light and deep sleep. + + + +#### light\_sleep + +Optional, but if provided, deep_sleep is required + + + +#### deep\_sleep + +Required + + + +#### get\_api\_client + +```python +def get_api_client( + api_token: Optional[str] = None, + host: Optional[str] = None +) -> tuple[AppsApi, ServicesApi, InstancesApi, CatalogInstancesApi] +``` + +Get configured API clients for Koyeb operations. + +**Arguments**: + +- `api_token` - Koyeb API token. If not provided, will try to get from KOYEB_API_TOKEN env var +- `host` - Koyeb API host URL. If not provided, will try to get from KOYEB_API_HOST env var (defaults to https://app.koyeb.com) + + +**Returns**: + + Tuple of (AppsApi, ServicesApi, InstancesApi, CatalogInstancesApi) instances + + +**Raises**: + +- `ValueError` - If API token is not provided + + + +#### build\_env\_vars + +```python +def build_env_vars(env: Optional[Dict[str, str]]) -> List[DeploymentEnv] +``` + +Build environment variables list from dictionary. + +**Arguments**: + +- `env` - Dictionary of environment variables + + +**Returns**: + + List of DeploymentEnv objects + + + +#### create\_docker\_source + +```python +def create_docker_source(image: str, command_args: List[str]) -> DockerSource +``` + +Create Docker source configuration. 
+ +**Arguments**: + +- `image` - Docker image name +- `command_args` - Command and arguments to run (optional, empty list means use image default) + + +**Returns**: + + DockerSource object + + + +#### create\_koyeb\_sandbox\_ports + +```python +def create_koyeb_sandbox_ports(protocol: str = "http") -> List[DeploymentPort] +``` + +Create port configuration for koyeb/sandbox image. + +Creates two ports: +- Port 3030 exposed on HTTP, mounted on /koyeb-sandbox/ +- Port 3031 exposed with the specified protocol, mounted on / + +**Arguments**: + +- `protocol` - Protocol to use for port 3031 ("http" or "http2"), defaults to "http" + + +**Returns**: + + List of DeploymentPort objects configured for koyeb/sandbox + + + +#### create\_koyeb\_sandbox\_proxy\_ports + +```python +def create_koyeb_sandbox_proxy_ports() -> List[DeploymentProxyPort] +``` + +Create TCP proxy port configuration for koyeb/sandbox image. + +Creates proxy port for direct TCP access: +- Port 3031 exposed via TCP proxy + +**Returns**: + + List of DeploymentProxyPort objects configured for TCP proxy access + + + +#### create\_koyeb\_sandbox\_routes + +```python +def create_koyeb_sandbox_routes() -> List[DeploymentRoute] +``` + +Create route configuration for koyeb/sandbox image to make it publicly accessible. + +Creates two routes: +- Port 3030 accessible at /koyeb-sandbox/ +- Port 3031 accessible at / + +**Returns**: + + List of DeploymentRoute objects configured for koyeb/sandbox + + + +#### create\_deployment\_definition + +```python +def create_deployment_definition( + name: str, + docker_source: DockerSource, + env_vars: List[DeploymentEnv], + instance_type: str, + exposed_port_protocol: Optional[str] = None, + regions: List[str] = None, + routes: Optional[List[DeploymentRoute]] = None, + idle_timeout: Optional[IdleTimeout] = None, + light_sleep_enabled: bool = True, + enable_tcp_proxy: bool = False) -> DeploymentDefinition +``` + +Create deployment definition for a sandbox service. 
+ +**Arguments**: + +- `name` - Service name +- `docker_source` - Docker configuration +- `env_vars` - Environment variables +- `instance_type` - Instance type +- `exposed_port_protocol` - Protocol to expose ports with ("http" or "http2"). + If None, defaults to "http". + If provided, must be one of "http" or "http2". +- `regions` - List of regions (defaults to ["na"]) +- `routes` - List of routes for public access +- `idle_timeout` - Idle timeout configuration (see IdleTimeout type) +- `light_sleep_enabled` - Whether light sleep is enabled for the instance type (default: True) +- `enable_tcp_proxy` - If True, enables TCP proxy for direct TCP access to port 3031 + + +**Returns**: + + DeploymentDefinition object + + + +#### get\_sandbox\_status + +```python +def get_sandbox_status(instance_id: str, + api_token: Optional[str] = None) -> InstanceStatus +``` + +Get the current status of a sandbox instance. + + + +#### is\_sandbox\_healthy + +```python +def is_sandbox_healthy(instance_id: str, + sandbox_url: str, + sandbox_secret: str, + api_token: Optional[str] = None) -> bool +``` + +Check if sandbox is healthy and ready for operations. + +This function requires both sandbox_url and sandbox_secret to properly check: +1. The Koyeb instance status (via API) - using instance_id and api_token +2. The sandbox executor health endpoint (via SandboxClient) - using sandbox_url and sandbox_secret + +**Arguments**: + +- `instance_id` - The Koyeb instance ID +- `api_token` - Koyeb API token +- `sandbox_url` - URL of the sandbox executor API (required) +- `sandbox_secret` - Secret for sandbox executor authentication (required) + + +**Returns**: + +- `bool` - True if sandbox is healthy, False otherwise + + +**Raises**: + +- `ValueError` - If sandbox_url or sandbox_secret are not provided + + + +#### escape\_shell\_arg + +```python +def escape_shell_arg(arg: str) -> str +``` + +Escape a shell argument for safe use in shell commands. 
+ +**Arguments**: + +- `arg` - The argument to escape + + +**Returns**: + + Properly escaped shell argument + + + +#### validate\_port + +```python +def validate_port(port: int) -> None +``` + +Validate that a port number is in the valid range. + +**Arguments**: + +- `port` - Port number to validate + + +**Raises**: + +- `ValueError` - If port is not in valid range [1, 65535] + + + +#### check\_error\_message + +```python +def check_error_message(error_msg: str, error_type: str) -> bool +``` + +Check if an error message matches a specific error type. +Uses case-insensitive matching against known error patterns. + +**Arguments**: + +- `error_msg` - The error message to check +- `error_type` - The type of error to check for (key in ERROR_MESSAGES) + + +**Returns**: + + True if error message matches the error type + + + +#### run\_sync\_in\_executor + +```python +async def run_sync_in_executor(method: Callable[..., Any], *args: Any, + **kwargs: Any) -> Any +``` + +Run a synchronous method in an async executor. + +Helper function to wrap synchronous methods for async execution. +Used by AsyncSandbox and AsyncSandboxFilesystem to wrap sync parent methods. + +**Arguments**: + +- `method` - The synchronous method to run +- `*args` - Positional arguments for the method +- `**kwargs` - Keyword arguments for the method + + +**Returns**: + + Result of the synchronous method call + + + +#### async\_wrapper + +```python +def async_wrapper(method_name: str) +``` + +Decorator to automatically create async wrapper for sync methods. + +This decorator creates an async method that wraps a sync method from the parent class. +The sync method is called via super() and executed in an executor. 
+ +**Arguments**: + +- `method_name` - Name of the sync method to wrap (from parent class) + + Usage: + @async_wrapper("delete") + async def delete(self) -> None: + """Delete the sandbox instance asynchronously.""" + pass # Implementation is handled by decorator + + + +#### create\_sandbox\_client + +```python +def create_sandbox_client(sandbox_url: Optional[str], + sandbox_secret: Optional[str], + existing_client: Optional[Any] = None) -> Any +``` + +Create or return existing SandboxClient instance with validation. + +Helper function to create SandboxClient instances with consistent validation. +Used by Sandbox, SandboxExecutor, and SandboxFilesystem to avoid duplication. + +**Arguments**: + +- `sandbox_url` - The sandbox URL (from _get_sandbox_url() or sandbox._get_sandbox_url()) +- `sandbox_secret` - The sandbox secret +- `existing_client` - Existing client instance to return if not None + + +**Returns**: + +- `SandboxClient` - Configured client instance + + +**Raises**: + +- `SandboxError` - If sandbox URL or secret is not available + + + +## SandboxError Objects + +```python +class SandboxError(Exception) +``` - +Base exception for sandbox operations -#### is\_sandbox\_healthy + + +# koyeb/sandbox.executor\_client + +Sandbox Executor API Client + +A simple Python client for interacting with the Sandbox Executor API. + + + +## SandboxClient Objects ```python -def is_sandbox_healthy(instance_id: str, - sandbox_url: str, - sandbox_secret: str, - api_token: Optional[str] = None) -> bool +class SandboxClient() ``` -Check if sandbox is healthy and ready for operations. +Client for the Sandbox Executor API. - + -#### ensure\_sandbox\_healthy +#### \_\_init\_\_ ```python -def ensure_sandbox_healthy(instance_id: str, - api_token: Optional[str] = None) -> None +def __init__(base_url: str, + secret: str, + timeout: float = DEFAULT_HTTP_TIMEOUT) ``` -Ensure a sandbox instance is healthy, raising an exception if not. +Initialize the Sandbox Client. 
- +**Arguments**: -## SandboxError Objects +- `base_url` - The base URL of the sandbox server (e.g., 'http://localhost:8080') +- `secret` - The authentication secret/token +- `timeout` - Request timeout in seconds (default: 30) + + + +#### close ```python -class SandboxError(Exception) +def close() -> None ``` -Base exception for sandbox operations +Close the HTTP session and release resources. + + + +#### \_\_enter\_\_ + +```python +def __enter__() +``` + +Context manager entry - returns self. + + + +#### \_\_exit\_\_ + +```python +def __exit__(exc_type, exc_val, exc_tb) -> None +``` + +Context manager exit - automatically closes the session. + + + +#### \_\_del\_\_ + +```python +def __del__() +``` + +Clean up session on deletion (fallback, not guaranteed to run). + + + +#### health + +```python +def health() -> Dict[str, str] +``` + +Check the health status of the server. + +**Returns**: + + Dict with status information + + +**Raises**: + +- `requests.HTTPError` - If the health check fails + + + +#### run + +```python +def run(cmd: str, + cwd: Optional[str] = None, + env: Optional[Dict[str, str]] = None, + timeout: Optional[float] = None) -> Dict[str, Any] +``` + +Execute a shell command in the sandbox. + +**Arguments**: + +- `cmd` - The shell command to execute +- `cwd` - Optional working directory for command execution +- `env` - Optional environment variables to set/override +- `timeout` - Optional timeout in seconds for the request + + +**Returns**: + + Dict containing stdout, stderr, error (if any), and exit code + + + +#### run\_streaming + +```python +def run_streaming(cmd: str, + cwd: Optional[str] = None, + env: Optional[Dict[str, str]] = None, + timeout: Optional[float] = None) -> Iterator[Dict[str, Any]] +``` + +Execute a shell command in the sandbox and stream the output in real-time. + +This method uses Server-Sent Events (SSE) to stream command output line-by-line +as it's produced. 
Use this for long-running commands where you want real-time +output. For simple commands where buffered output is acceptable, use run() instead. + +**Arguments**: + +- `cmd` - The shell command to execute +- `cwd` - Optional working directory for command execution +- `env` - Optional environment variables to set/override +- `timeout` - Optional timeout in seconds for the streaming request + + +**Yields**: + + Dict events with the following types: + + - output events (as command produces output): +- `{"stream"` - "stdout"|"stderr", "data": "line of output"} + + - complete event (when command finishes): +- `{"code"` - , "error": false} + + - error event (if command fails to start): +- `{"error"` - "error message"} + + +**Example**: + + >>> client = SandboxClient("http://localhost:8080", "secret") + >>> for event in client.run_streaming("echo 'Hello'; sleep 1; echo 'World'"): + ... if "stream" in event: + ... print(f"{event['stream']}: {event['data']}") + ... elif "code" in event: + ... print(f"Exit code: {event['code']}") + + + +#### write\_file + +```python +def write_file(path: str, content: str) -> Dict[str, Any] +``` + +Write content to a file. + +**Arguments**: + +- `path` - The file path to write to +- `content` - The content to write + + +**Returns**: + + Dict with success status and error if any + + + +#### read\_file + +```python +def read_file(path: str) -> Dict[str, Any] +``` + +Read content from a file. + +**Arguments**: + +- `path` - The file path to read from + + +**Returns**: + + Dict with file content and error if any + + + +#### delete\_file + +```python +def delete_file(path: str) -> Dict[str, Any] +``` + +Delete a file. + +**Arguments**: + +- `path` - The file path to delete + + +**Returns**: + + Dict with success status and error if any + + + +#### make\_dir + +```python +def make_dir(path: str) -> Dict[str, Any] +``` + +Create a directory (including parent directories). 
+ +**Arguments**: + +- `path` - The directory path to create + + +**Returns**: + + Dict with success status and error if any + + + +#### delete\_dir + +```python +def delete_dir(path: str) -> Dict[str, Any] +``` + +Recursively delete a directory and all its contents. + +**Arguments**: + +- `path` - The directory path to delete + + +**Returns**: + + Dict with success status and error if any + + + +#### list\_dir + +```python +def list_dir(path: str) -> Dict[str, Any] +``` + +List the contents of a directory. + +**Arguments**: + +- `path` - The directory path to list + + +**Returns**: + + Dict with entries list and error if any + + + +#### bind\_port + +```python +def bind_port(port: int) -> Dict[str, Any] +``` + +Bind a port to the TCP proxy for external access. + +Configures the TCP proxy to forward traffic to the specified port inside the sandbox. +This allows you to expose services running inside the sandbox to external connections. + +**Arguments**: + +- `port` - The port number to bind to (must be a valid port number) + + +**Returns**: + + Dict with success status, message, and port information + + +**Notes**: + + - Only one port can be bound at a time + - Binding a new port will override the previous binding + - The port must be available and accessible within the sandbox environment + + + +#### unbind\_port + +```python +def unbind_port(port: Optional[int] = None) -> Dict[str, Any] +``` + +Unbind a port from the TCP proxy. + +Removes the TCP proxy port binding, stopping traffic forwarding to the previously bound port. + +**Arguments**: + +- `port` - Optional port number to unbind. If provided, it must match the currently bound port. + If not provided, any existing binding will be removed. 
+ + +**Returns**: + + Dict with success status and message + + +**Notes**: + + - If a port is specified and doesn't match the currently bound port, the request will fail + - After unbinding, the TCP proxy will no longer forward traffic + + + +#### start\_process + +```python +def start_process(cmd: str, + cwd: Optional[str] = None, + env: Optional[Dict[str, str]] = None) -> Dict[str, Any] +``` + +Start a background process in the sandbox. + +Starts a long-running background process that continues executing even after +the API call completes. Use this for servers, workers, or other long-running tasks. + +**Arguments**: + +- `cmd` - The shell command to execute as a background process +- `cwd` - Optional working directory for the process +- `env` - Optional environment variables to set/override for the process + + +**Returns**: + + Dict with process id and success status: + - id: The unique process ID (UUID string) + - success: True if the process was started successfully + + +**Example**: + + >>> client = SandboxClient("http://localhost:8080", "secret") + >>> result = client.start_process("python -u server.py") + >>> process_id = result["id"] + >>> print(f"Started process: {process_id}") + + + +#### kill\_process + +```python +def kill_process(process_id: str) -> Dict[str, Any] +``` + +Kill a background process by its ID. + +Terminates a running background process. This sends a SIGTERM signal to the process, +allowing it to clean up gracefully. If the process doesn't terminate within a timeout, +it will be forcefully killed with SIGKILL. + +**Arguments**: + +- `process_id` - The unique process ID (UUID string) to kill + + +**Returns**: + + Dict with success status and error message if any + + +**Example**: + + >>> client = SandboxClient("http://localhost:8080", "secret") + >>> result = client.kill_process("550e8400-e29b-41d4-a716-446655440000") + >>> if result.get("success"): + ... 
print("Process killed successfully") + + + +#### list\_processes + +```python +def list_processes() -> Dict[str, Any] +``` + +List all background processes. + +Returns information about all currently running and recently completed background +processes. This includes both active processes and processes that have completed +(which remain in memory until server restart). + +**Returns**: + + Dict with a list of processes: + - processes: List of process objects, each containing: + - id: Process ID (UUID string) + - command: The command that was executed + - status: Process status (e.g., "running", "completed") + - pid: OS process ID (if running) + - exit_code: Exit code (if completed) + - started_at: ISO 8601 timestamp when process started + - completed_at: ISO 8601 timestamp when process completed (if applicable) + + +**Example**: + + >>> client = SandboxClient("http://localhost:8080", "secret") + >>> result = client.list_processes() + >>> for process in result.get("processes", []): + ... print(f"{process['id']}: {process['command']} - {process['status']}") From c8cebf0b36da3225598884e56154feaca561d316 Mon Sep 17 00:00:00 2001 From: Edouard Bonlieu Date: Thu, 6 Nov 2025 12:13:08 +0100 Subject: [PATCH 42/47] allow single region only set privileges to true by default use SANDBOX type --- koyeb/sandbox/sandbox.py | 18 +++++++++--------- koyeb/sandbox/utils.py | 23 +++++++++++++++-------- 2 files changed, 24 insertions(+), 17 deletions(-) diff --git a/koyeb/sandbox/sandbox.py b/koyeb/sandbox/sandbox.py index 1666c592..de4771fc 100644 --- a/koyeb/sandbox/sandbox.py +++ b/koyeb/sandbox/sandbox.py @@ -110,7 +110,7 @@ def create( instance_type: str = "nano", exposed_port_protocol: Optional[str] = None, env: Optional[Dict[str, str]] = None, - regions: Optional[List[str]] = None, + region: Optional[str] = None, api_token: Optional[str] = None, timeout: int = 300, idle_timeout: Optional[IdleTimeout] = None, @@ -128,7 +128,7 @@ def create( If None, defaults to "http". 
If provided, must be one of "http" or "http2". env: Environment variables - regions: List of regions to deploy to (default: ["na"]) + region: Region to deploy to (default: "na") api_token: Koyeb API token (if None, will try to get from KOYEB_API_TOKEN env var) timeout: Timeout for sandbox creation in seconds idle_timeout: Idle timeout configuration for scale-to-zero @@ -154,7 +154,7 @@ def create( instance_type=instance_type, exposed_port_protocol=exposed_port_protocol, env=env, - regions=regions, + region=region, api_token=api_token, timeout=timeout, idle_timeout=idle_timeout, @@ -174,7 +174,7 @@ def _create_sync( instance_type: str = "nano", exposed_port_protocol: Optional[str] = None, env: Optional[Dict[str, str]] = None, - regions: Optional[List[str]] = None, + region: Optional[str] = None, api_token: Optional[str] = None, timeout: int = 300, idle_timeout: Optional[IdleTimeout] = None, @@ -207,14 +207,14 @@ def _create_sync( app_id = app_response.app.id env_vars = build_env_vars(env) - docker_source = create_docker_source(image, []) + docker_source = create_docker_source(image, [], privileged=True) deployment_definition = create_deployment_definition( name=name, docker_source=docker_source, env_vars=env_vars, instance_type=instance_type, exposed_port_protocol=exposed_port_protocol, - regions=regions, + region=region, routes=routes, idle_timeout=idle_timeout, light_sleep_enabled=light_sleep_enabled, @@ -892,7 +892,7 @@ async def create( instance_type: str = "nano", exposed_port_protocol: Optional[str] = None, env: Optional[Dict[str, str]] = None, - regions: Optional[List[str]] = None, + region: Optional[str] = None, api_token: Optional[str] = None, timeout: int = 300, idle_timeout: Optional[IdleTimeout] = None, @@ -910,7 +910,7 @@ async def create( If None, defaults to "http". If provided, must be one of "http" or "http2". 
env: Environment variables - regions: List of regions to deploy to (default: ["na"]) + region: Region to deploy to (default: "na") api_token: Koyeb API token (if None, will try to get from KOYEB_API_TOKEN env var) timeout: Timeout for sandbox creation in seconds idle_timeout: Idle timeout configuration for scale-to-zero @@ -939,7 +939,7 @@ async def create( instance_type=instance_type, exposed_port_protocol=exposed_port_protocol, env=env, - regions=regions, + region=region, api_token=api_token, timeout=timeout, idle_timeout=idle_timeout, diff --git a/koyeb/sandbox/utils.py b/koyeb/sandbox/utils.py index 829255bb..6878433e 100644 --- a/koyeb/sandbox/utils.py +++ b/koyeb/sandbox/utils.py @@ -153,13 +153,16 @@ def build_env_vars(env: Optional[Dict[str, str]]) -> List[DeploymentEnv]: return env_vars -def create_docker_source(image: str, command_args: List[str]) -> DockerSource: +def create_docker_source( + image: str, command_args: List[str], privileged: Optional[bool] = None +) -> DockerSource: """ Create Docker source configuration. Args: image: Docker image name command_args: Command and arguments to run (optional, empty list means use image default) + privileged: If True, run the container in privileged mode (default: None/False) Returns: DockerSource object @@ -168,6 +171,7 @@ def create_docker_source(image: str, command_args: List[str]) -> DockerSource: image=image, command=command_args[0] if command_args else None, args=list(command_args[1:]) if len(command_args) > 1 else None, + privileged=privileged, ) @@ -382,7 +386,7 @@ def create_deployment_definition( env_vars: List[DeploymentEnv], instance_type: str, exposed_port_protocol: Optional[str] = None, - regions: List[str] = None, + region: Optional[str] = None, routes: Optional[List[DeploymentRoute]] = None, idle_timeout: Optional[IdleTimeout] = None, light_sleep_enabled: bool = True, @@ -399,7 +403,7 @@ def create_deployment_definition( exposed_port_protocol: Protocol to expose ports with ("http" or "http2"). 
If None, defaults to "http". If provided, must be one of "http" or "http2". - regions: List of regions (defaults to ["na"]) + region: Region to deploy to (defaults to "na") routes: List of routes for public access idle_timeout: Idle timeout configuration (see IdleTimeout type) light_sleep_enabled: Whether light sleep is enabled for the instance type (default: True) @@ -408,8 +412,11 @@ def create_deployment_definition( Returns: DeploymentDefinition object """ - if regions is None: - regions = ["na"] + if region is None: + region = "na" + + # Convert single region string to list for API + regions_list = [region] # Always create ports with protocol (default to "http" if not specified) protocol = exposed_port_protocol if exposed_port_protocol is not None else "http" @@ -422,8 +429,8 @@ def create_deployment_definition( if enable_tcp_proxy: proxy_ports = create_koyeb_sandbox_proxy_ports() - # Always use WEB type - deployment_type = DeploymentDefinitionType.WEB + # Always use SANDBOX type + deployment_type = DeploymentDefinitionType.SANDBOX # Process idle_timeout sleep_idle_delay = _process_idle_timeout(idle_timeout, light_sleep_enabled) @@ -449,7 +456,7 @@ def create_deployment_definition( routes=routes, instance_types=[DeploymentInstanceType(type=instance_type)], scalings=scalings, - regions=regions, + regions=regions_list, ) From bb62c754e3eceaba9a70cba49a8f3ed5c8a3d4c8 Mon Sep 17 00:00:00 2001 From: Edouard Bonlieu Date: Thu, 6 Nov 2025 16:10:23 +0100 Subject: [PATCH 43/47] remove unnecessary fields and methods --- examples/01_create_sandbox.py | 5 +- examples/01_create_sandbox_async.py | 5 +- examples/02_create_sandbox_with_timing.py | 21 ++-- .../02_create_sandbox_with_timing_async.py | 21 ++-- examples/15_get_sandbox.py | 5 +- examples/15_get_sandbox_async.py | 5 +- koyeb/sandbox/sandbox.py | 100 +++--------------- 7 files changed, 45 insertions(+), 117 deletions(-) diff --git a/examples/01_create_sandbox.py b/examples/01_create_sandbox.py index 
386a1a5b..1ca091c2 100644 --- a/examples/01_create_sandbox.py +++ b/examples/01_create_sandbox.py @@ -21,10 +21,9 @@ def main(): api_token=api_token, ) - # Check status - status = sandbox.status() + # Check health is_healthy = sandbox.is_healthy() - print(f"Status: {status}, Healthy: {is_healthy}") + print(f"Healthy: {is_healthy}") # Test command result = sandbox.exec("echo 'Sandbox is ready!'") diff --git a/examples/01_create_sandbox_async.py b/examples/01_create_sandbox_async.py index 33dfe19c..dcdf0199 100644 --- a/examples/01_create_sandbox_async.py +++ b/examples/01_create_sandbox_async.py @@ -22,10 +22,9 @@ async def main(): api_token=api_token, ) - # Check status - status = await sandbox.status() + # Check health is_healthy = await sandbox.is_healthy() - print(f"Status: {status}, Healthy: {is_healthy}") + print(f"Healthy: {is_healthy}") # Test command result = await sandbox.exec("echo 'Sandbox is ready!'") diff --git a/examples/02_create_sandbox_with_timing.py b/examples/02_create_sandbox_with_timing.py index 1cca958d..d711cdc7 100644 --- a/examples/02_create_sandbox_with_timing.py +++ b/examples/02_create_sandbox_with_timing.py @@ -89,14 +89,13 @@ def main(run_long_tests=False): tracker.record("Sandbox creation", create_duration, "setup") print(f" ✓ took {create_duration:.1f}s") - # Check status with timing - print(" → Checking sandbox status...") - status_start = time.time() - status = sandbox.status() + # Check health with timing + print(" → Checking sandbox health...") + health_start = time.time() is_healthy = sandbox.is_healthy() - status_duration = time.time() - status_start - tracker.record("Status check", status_duration, "monitoring") - print(f" ✓ took {status_duration:.1f}s") + health_duration = time.time() - health_start + tracker.record("Health check", health_duration, "monitoring") + print(f" ✓ took {health_duration:.1f}s") # Test command execution with timing print(" → Executing initial test command...") @@ -123,14 +122,14 @@ def 
main(run_long_tests=False): tracker.record("Heavy computation", compute_duration, "long_tests") print(f" ✓ took {compute_duration:.1f}s") - # Long test 3: Multiple status checks - print(" → [LONG TEST] Multiple status checks...") + # Long test 3: Multiple health checks + print(" → [LONG TEST] Multiple health checks...") multi_check_start = time.time() for i in range(5): - sandbox.status() + sandbox.is_healthy() time.sleep(0.5) multi_check_duration = time.time() - multi_check_start - tracker.record("Multiple status checks (5x)", multi_check_duration, "long_tests") + tracker.record("Multiple health checks (5x)", multi_check_duration, "long_tests") print(f" ✓ took {multi_check_duration:.1f}s") except Exception as e: diff --git a/examples/02_create_sandbox_with_timing_async.py b/examples/02_create_sandbox_with_timing_async.py index c34f1b47..90bce722 100644 --- a/examples/02_create_sandbox_with_timing_async.py +++ b/examples/02_create_sandbox_with_timing_async.py @@ -93,14 +93,13 @@ async def main(run_long_tests=False): tracker.record("Sandbox creation", create_duration, "setup") print(f" ✓ took {create_duration:.1f}s") - # Check status with timing - print(" → Checking sandbox status...") - status_start = time.time() - await sandbox.status() + # Check health with timing + print(" → Checking sandbox health...") + health_start = time.time() await sandbox.is_healthy() - status_duration = time.time() - status_start - tracker.record("Status check", status_duration, "monitoring") - print(f" ✓ took {status_duration:.1f}s") + health_duration = time.time() - health_start + tracker.record("Health check", health_duration, "monitoring") + print(f" ✓ took {health_duration:.1f}s") # Test command execution with timing print(" → Executing initial test command...") @@ -129,15 +128,15 @@ async def main(run_long_tests=False): tracker.record("Heavy computation", compute_duration, "long_tests") print(f" ✓ took {compute_duration:.1f}s") - # Long test 3: Multiple status checks - print(" → 
[LONG TEST] Multiple status checks...") + # Long test 3: Multiple health checks + print(" → [LONG TEST] Multiple health checks...") multi_check_start = time.time() for i in range(5): - await sandbox.status() + await sandbox.is_healthy() await asyncio.sleep(0.5) multi_check_duration = time.time() - multi_check_start tracker.record( - "Multiple status checks (5x)", multi_check_duration, "long_tests" + "Multiple health checks (5x)", multi_check_duration, "long_tests" ) print(f" ✓ took {multi_check_duration:.1f}s") diff --git a/examples/15_get_sandbox.py b/examples/15_get_sandbox.py index 25b30b97..5eb161ae 100644 --- a/examples/15_get_sandbox.py +++ b/examples/15_get_sandbox.py @@ -50,10 +50,9 @@ def main(): assert original_sandbox.id == retrieved_sandbox.id, "Sandbox IDs should match!" print(" ✓ Confirmed: Same sandbox retrieved") - # Check status - status = retrieved_sandbox.status() + # Check health is_healthy = retrieved_sandbox.is_healthy() - print(f" Status: {status}, Healthy: {is_healthy}") + print(f" Healthy: {is_healthy}") # Execute a command with the retrieved sandbox if is_healthy: diff --git a/examples/15_get_sandbox_async.py b/examples/15_get_sandbox_async.py index ec3fefec..e80789c9 100644 --- a/examples/15_get_sandbox_async.py +++ b/examples/15_get_sandbox_async.py @@ -51,10 +51,9 @@ async def main(): assert original_sandbox.id == retrieved_sandbox.id, "Sandbox IDs should match!" 
print(" ✓ Confirmed: Same sandbox retrieved") - # Check status - status = await retrieved_sandbox.status() + # Check health is_healthy = await retrieved_sandbox.is_healthy() - print(f" Status: {status}, Healthy: {is_healthy}") + print(f" Healthy: {is_healthy}") # Execute a command with the retrieved sandbox if is_healthy: diff --git a/koyeb/sandbox/sandbox.py b/koyeb/sandbox/sandbox.py index de4771fc..5c296e1c 100644 --- a/koyeb/sandbox/sandbox.py +++ b/koyeb/sandbox/sandbox.py @@ -31,7 +31,6 @@ create_koyeb_sandbox_routes, create_sandbox_client, get_api_client, - is_sandbox_healthy, logger, run_sync_in_executor, validate_port, @@ -80,7 +79,6 @@ def __init__( sandbox_id: str, app_id: str, service_id: str, - instance_id: str, name: Optional[str] = None, api_token: Optional[str] = None, sandbox_secret: Optional[str] = None, @@ -88,7 +86,6 @@ def __init__( self.sandbox_id = sandbox_id self.app_id = app_id self.service_id = service_id - self.instance_id = instance_id self.name = name self.api_token = api_token self.sandbox_secret = sandbox_secret @@ -224,41 +221,11 @@ def _create_sync( create_service = CreateService(app_id=app_id, definition=deployment_definition) service_response = services_api.create_service(service=create_service) service_id = service_response.service.id - deployment_id = service_response.service.latest_deployment_id - - deployments_api = DeploymentsApi(services_api.api_client) - - max_wait = min(timeout // 2, 60) if timeout > 60 else timeout - wait_interval = 0.5 - start_time = time.time() - - while time.time() - start_time < max_wait: - try: - scaling_response = deployments_api.get_deployment_scaling( - id=deployment_id - ) - - if scaling_response.replicas and scaling_response.replicas[0].instances: - instance_id = scaling_response.replicas[0].instances[0].id - break - else: - logger.debug( - f"Waiting for instances to be created... 
(elapsed: {time.time() - start_time:.1f}s)" - ) - time.sleep(wait_interval) - except Exception as e: - logger.warning(f"Error getting deployment scaling: {e}") - time.sleep(wait_interval) - else: - raise SandboxError( - f"No instances found in deployment after {max_wait} seconds" - ) return cls( sandbox_id=name, app_id=app_id, service_id=service_id, - instance_id=instance_id, name=name, api_token=api_token, sandbox_secret=sandbox_secret, @@ -314,7 +281,6 @@ def get_from_id( # Get deployment to extract sandbox_secret from env vars deployment_id = service.active_deployment_id or service.latest_deployment_id sandbox_secret = None - instance_id = None if deployment_id: try: @@ -329,36 +295,13 @@ def get_from_id( if env_var.key == "SANDBOX_SECRET": sandbox_secret = env_var.value break - - # Get instance_id from deployment scaling - try: - scaling_response = deployments_api.get_deployment_scaling( - id=deployment_id - ) - if ( - scaling_response.replicas - and scaling_response.replicas[0].instances - and len(scaling_response.replicas[0].instances) > 0 - ): - instance_id = scaling_response.replicas[0].instances[0].id - except Exception: - logger.debug( - f"Could not get instance for deployment {deployment_id}" - ) except Exception as e: logger.debug(f"Could not get deployment {deployment_id}: {e}") - if not instance_id: - raise SandboxError( - f"Could not find instance for sandbox {id}. " - "The sandbox may not be fully provisioned yet." 
- ) - return cls( sandbox_id=service.id, app_id=service.app_id, service_id=service.id, - instance_id=instance_id, name=sandbox_name, api_token=api_token, sandbox_secret=sandbox_secret, @@ -391,12 +334,7 @@ def wait_ready( time.sleep(poll_interval) continue - is_healthy = is_sandbox_healthy( - self.instance_id, - sandbox_url=sandbox_url, - sandbox_secret=self.sandbox_secret, - api_token=self.api_token, - ) + is_healthy = self.is_healthy() if is_healthy: return True @@ -561,22 +499,25 @@ def _check_response_error(self, response: Dict, operation: str) -> None: error_msg = response.get("error", "Unknown error") raise SandboxError(f"Failed to {operation}: {error_msg}") - def status(self) -> str: - """Get current sandbox status""" - from .utils import get_sandbox_status - - status = get_sandbox_status(self.instance_id, self.api_token) - return status.value - def is_healthy(self) -> bool: """Check if sandbox is healthy and ready for operations""" sandbox_url = self._get_sandbox_url() - return is_sandbox_healthy( - self.instance_id, - sandbox_url=sandbox_url, - sandbox_secret=self.sandbox_secret, - api_token=self.api_token, - ) + if not sandbox_url or not self.sandbox_secret: + return False + + # Check executor health directly - this is what matters for operations + # If executor is healthy, the sandbox is usable (will wake up service if needed) + try: + from .executor_client import SandboxClient + + client = SandboxClient(sandbox_url, self.sandbox_secret) + health_response = client.health() + if isinstance(health_response, dict): + status = health_response.get("status", "").lower() + return status in ["ok", "healthy", "ready"] + return True # If we got a response, consider it healthy + except Exception: + return False @property def filesystem(self) -> "SandboxFilesystem": @@ -874,7 +815,6 @@ async def get_from_id( sandbox_id=sync_sandbox.sandbox_id, app_id=sync_sandbox.app_id, service_id=sync_sandbox.service_id, - instance_id=sync_sandbox.instance_id, 
name=sync_sandbox.name, api_token=sync_sandbox.api_token, sandbox_secret=sync_sandbox.sandbox_secret, @@ -952,7 +892,6 @@ async def create( sandbox_id=sync_result.sandbox_id, app_id=sync_result.app_id, service_id=sync_result.service_id, - instance_id=sync_result.instance_id, name=sync_result.name, api_token=sync_result.api_token, sandbox_secret=sync_result.sandbox_secret, @@ -1030,11 +969,6 @@ async def delete(self) -> None: """Delete the sandbox instance asynchronously.""" pass - @async_wrapper("status") - async def status(self) -> str: - """Get current sandbox status asynchronously""" - pass - @async_wrapper("is_healthy") async def is_healthy(self) -> bool: """Check if sandbox is healthy and ready for operations asynchronously""" From c735c293834fd4c5f6d04e91d63464b4c81c761e Mon Sep 17 00:00:00 2001 From: Edouard Bonlieu Date: Thu, 13 Nov 2025 14:32:54 +0100 Subject: [PATCH 44/47] raise exception when sandbox creation timeout is reached --- koyeb/sandbox/__init__.py | 3 ++- koyeb/sandbox/sandbox.py | 25 +++++++++++++++++++++++-- koyeb/sandbox/utils.py | 4 ++++ 3 files changed, 29 insertions(+), 3 deletions(-) diff --git a/koyeb/sandbox/__init__.py b/koyeb/sandbox/__init__.py index ac993c87..41b6f352 100644 --- a/koyeb/sandbox/__init__.py +++ b/koyeb/sandbox/__init__.py @@ -17,7 +17,7 @@ ) from .filesystem import FileInfo, SandboxFilesystem from .sandbox import AsyncSandbox, ExposedPort, ProcessInfo, Sandbox -from .utils import SandboxError +from .utils import SandboxError, SandboxTimeoutError __all__ = [ "Sandbox", @@ -28,6 +28,7 @@ "FileInfo", "SandboxStatus", "SandboxError", + "SandboxTimeoutError", "CommandResult", "CommandStatus", "SandboxCommandError", diff --git a/koyeb/sandbox/sandbox.py b/koyeb/sandbox/sandbox.py index 5c296e1c..6c6122bd 100644 --- a/koyeb/sandbox/sandbox.py +++ b/koyeb/sandbox/sandbox.py @@ -23,6 +23,7 @@ DEFAULT_POLL_INTERVAL, IdleTimeout, SandboxError, + SandboxTimeoutError, _is_light_sleep_enabled, async_wrapper, build_env_vars, @@ 
-137,6 +138,10 @@ def create( Returns: Sandbox: A new Sandbox instance + + Raises: + ValueError: If API token is not provided + SandboxTimeoutError: If wait_ready is True and sandbox does not become ready within timeout """ if api_token is None: api_token = os.getenv("KOYEB_API_TOKEN") @@ -159,7 +164,13 @@ def create( ) if wait_ready: - sandbox.wait_ready(timeout=timeout) + is_ready = sandbox.wait_ready(timeout=timeout) + if not is_ready: + raise SandboxTimeoutError( + f"Sandbox '{sandbox.name}' did not become ready within {timeout} seconds. " + f"The sandbox was created but may not be ready yet. " + f"You can check its status with sandbox.is_healthy() or call sandbox.wait_ready() again." + ) return sandbox @@ -862,6 +873,10 @@ async def create( Returns: AsyncSandbox: A new AsyncSandbox instance + + Raises: + ValueError: If API token is not provided + SandboxTimeoutError: If wait_ready is True and sandbox does not become ready within timeout """ if api_token is None: api_token = os.getenv("KOYEB_API_TOKEN") @@ -899,7 +914,13 @@ async def create( sandbox._created_at = sync_result._created_at if wait_ready: - await sandbox.wait_ready(timeout=timeout) + is_ready = await sandbox.wait_ready(timeout=timeout) + if not is_ready: + raise SandboxTimeoutError( + f"Sandbox '{sandbox.name}' did not become ready within {timeout} seconds. " + f"The sandbox was created but may not be ready yet. " + f"You can check its status with sandbox.is_healthy() or call sandbox.wait_ready() again." 
+ ) return sandbox diff --git a/koyeb/sandbox/utils.py b/koyeb/sandbox/utils.py index 6878433e..cf07edbe 100644 --- a/koyeb/sandbox/utils.py +++ b/koyeb/sandbox/utils.py @@ -681,3 +681,7 @@ def create_sandbox_client( class SandboxError(Exception): """Base exception for sandbox operations""" + + +class SandboxTimeoutError(SandboxError): + """Raised when a sandbox operation times out""" From 1ce139d0e315feef28775a39654f7abd058cd056 Mon Sep 17 00:00:00 2001 From: Bastien Chatelard Date: Fri, 14 Nov 2025 15:28:37 +0100 Subject: [PATCH 45/47] Switch default instance type to micro by default --- koyeb/sandbox/sandbox.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/koyeb/sandbox/sandbox.py b/koyeb/sandbox/sandbox.py index 6c6122bd..dc7e7cbe 100644 --- a/koyeb/sandbox/sandbox.py +++ b/koyeb/sandbox/sandbox.py @@ -105,7 +105,7 @@ def create( image: str = "koyeb/sandbox", name: str = "quick-sandbox", wait_ready: bool = True, - instance_type: str = "nano", + instance_type: str = "micro", exposed_port_protocol: Optional[str] = None, env: Optional[Dict[str, str]] = None, region: Optional[str] = None, From a75998bc65ee6f4d6e95b4d99b50bc378191d488 Mon Sep 17 00:00:00 2001 From: Bastien Chatelard Date: Fri, 14 Nov 2025 15:49:51 +0100 Subject: [PATCH 46/47] Add privileged parameter to create sandbox, default to false --- docs/sandbox.md | 8 ++++++-- koyeb/sandbox/sandbox.py | 9 ++++++++- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/docs/sandbox.md b/docs/sandbox.md index b2f63cce..5322f492 100644 --- a/docs/sandbox.md +++ b/docs/sandbox.md @@ -988,7 +988,8 @@ def create(cls, api_token: Optional[str] = None, timeout: int = 300, idle_timeout: Optional[IdleTimeout] = None, - enable_tcp_proxy: bool = False) -> Sandbox + enable_tcp_proxy: bool = False, + privileged: bool = False) -> Sandbox ``` Create a new sandbox instance. @@ -1012,6 +1013,7 @@ Create a new sandbox instance. 
- int > 0: Deep sleep only (e.g., 600 for 600s deep sleep) - dict: Explicit configuration with {"light_sleep": 300, "deep_sleep": 600} - `enable_tcp_proxy` - If True, enables TCP proxy for direct TCP access to port 3031 +- `privileged` - If True, run the container in privileged mode (default: False) **Returns**: @@ -1455,7 +1457,8 @@ async def create(cls, api_token: Optional[str] = None, timeout: int = 300, idle_timeout: Optional[IdleTimeout] = None, - enable_tcp_proxy: bool = False) -> AsyncSandbox + enable_tcp_proxy: bool = False, + privileged: bool = False) -> AsyncSandbox ``` Create a new sandbox instance with async support. @@ -1479,6 +1482,7 @@ Create a new sandbox instance with async support. - int > 0: Deep sleep only (e.g., 600 for 600s deep sleep) - dict: Explicit configuration with {"light_sleep": 300, "deep_sleep": 600} - `enable_tcp_proxy` - If True, enables TCP proxy for direct TCP access to port 3031 +- `privileged` - If True, run the container in privileged mode (default: False) **Returns**: diff --git a/koyeb/sandbox/sandbox.py b/koyeb/sandbox/sandbox.py index dc7e7cbe..8701fc1a 100644 --- a/koyeb/sandbox/sandbox.py +++ b/koyeb/sandbox/sandbox.py @@ -113,6 +113,7 @@ def create( timeout: int = 300, idle_timeout: Optional[IdleTimeout] = None, enable_tcp_proxy: bool = False, + privileged: bool = False, ) -> Sandbox: """ Create a new sandbox instance. 
@@ -135,6 +136,7 @@ def create( - int > 0: Deep sleep only (e.g., 600 for 600s deep sleep) - dict: Explicit configuration with {"light_sleep": 300, "deep_sleep": 600} enable_tcp_proxy: If True, enables TCP proxy for direct TCP access to port 3031 + privileged: If True, run the container in privileged mode (default: False) Returns: Sandbox: A new Sandbox instance @@ -161,6 +163,7 @@ def create( timeout=timeout, idle_timeout=idle_timeout, enable_tcp_proxy=enable_tcp_proxy, + privileged=privileged, ) if wait_ready: @@ -187,6 +190,7 @@ def _create_sync( timeout: int = 300, idle_timeout: Optional[IdleTimeout] = None, enable_tcp_proxy: bool = False, + privileged: bool = False, ) -> Sandbox: """ Synchronous creation method that returns creation parameters. @@ -215,7 +219,7 @@ def _create_sync( app_id = app_response.app.id env_vars = build_env_vars(env) - docker_source = create_docker_source(image, [], privileged=True) + docker_source = create_docker_source(image, [], privileged=privileged) deployment_definition = create_deployment_definition( name=name, docker_source=docker_source, @@ -848,6 +852,7 @@ async def create( timeout: int = 300, idle_timeout: Optional[IdleTimeout] = None, enable_tcp_proxy: bool = False, + privileged: bool = False, ) -> AsyncSandbox: """ Create a new sandbox instance with async support. 
@@ -870,6 +875,7 @@ async def create( - int > 0: Deep sleep only (e.g., 600 for 600s deep sleep) - dict: Explicit configuration with {"light_sleep": 300, "deep_sleep": 600} enable_tcp_proxy: If True, enables TCP proxy for direct TCP access to port 3031 + privileged: If True, run the container in privileged mode (default: False) Returns: AsyncSandbox: A new AsyncSandbox instance @@ -899,6 +905,7 @@ async def create( timeout=timeout, idle_timeout=idle_timeout, enable_tcp_proxy=enable_tcp_proxy, + privileged=privileged, ), ) From d36b7e9766f52f9cc7ff96c0734fd2500cbe74ad Mon Sep 17 00:00:00 2001 From: Bastien Chatelard Date: Fri, 14 Nov 2025 15:57:20 +0100 Subject: [PATCH 47/47] Update sandbox.md --- docs/sandbox.md | 62 ++++++++++++++++++++++++++----------------------- 1 file changed, 33 insertions(+), 29 deletions(-) diff --git a/docs/sandbox.md b/docs/sandbox.md index 5322f492..257268ac 100644 --- a/docs/sandbox.md +++ b/docs/sandbox.md @@ -981,10 +981,10 @@ def create(cls, image: str = "koyeb/sandbox", name: str = "quick-sandbox", wait_ready: bool = True, - instance_type: str = "nano", + instance_type: str = "micro", exposed_port_protocol: Optional[str] = None, env: Optional[Dict[str, str]] = None, - regions: Optional[List[str]] = None, + region: Optional[str] = None, api_token: Optional[str] = None, timeout: int = 300, idle_timeout: Optional[IdleTimeout] = None, @@ -1004,7 +1004,7 @@ Create a new sandbox instance. If None, defaults to "http". If provided, must be one of "http" or "http2". - `env` - Environment variables -- `regions` - List of regions to deploy to (default: ["na"]) +- `region` - Region to deploy to (default: "na") - `api_token` - Koyeb API token (if None, will try to get from KOYEB_API_TOKEN env var) - `timeout` - Timeout for sandbox creation in seconds - `idle_timeout` - Idle timeout configuration for scale-to-zero @@ -1019,6 +1019,12 @@ Create a new sandbox instance. 
**Returns**: - `Sandbox` - A new Sandbox instance + + +**Raises**: + +- `ValueError` - If API token is not provided +- `SandboxTimeoutError` - If wait_ready is True and sandbox does not become ready within timeout @@ -1137,16 +1143,6 @@ This is only available if enable_tcp_proxy=True was set when creating the sandbo Optional[tuple[str, int]]: A tuple of (host, port) or None if unavailable - - -#### status - -```python -def status() -> str -``` - -Get current sandbox status - #### is\_healthy @@ -1453,7 +1449,7 @@ async def create(cls, instance_type: str = "nano", exposed_port_protocol: Optional[str] = None, env: Optional[Dict[str, str]] = None, - regions: Optional[List[str]] = None, + region: Optional[str] = None, api_token: Optional[str] = None, timeout: int = 300, idle_timeout: Optional[IdleTimeout] = None, @@ -1473,7 +1469,7 @@ Create a new sandbox instance with async support. If None, defaults to "http". If provided, must be one of "http" or "http2". - `env` - Environment variables -- `regions` - List of regions to deploy to (default: ["na"]) +- `region` - Region to deploy to (default: "na") - `api_token` - Koyeb API token (if None, will try to get from KOYEB_API_TOKEN env var) - `timeout` - Timeout for sandbox creation in seconds - `idle_timeout` - Idle timeout configuration for scale-to-zero @@ -1488,6 +1484,12 @@ Create a new sandbox instance with async support. **Returns**: - `AsyncSandbox` - A new AsyncSandbox instance + + +**Raises**: + +- `ValueError` - If API token is not provided +- `SandboxTimeoutError` - If wait_ready is True and sandbox does not become ready within timeout @@ -1547,17 +1549,6 @@ async def delete() -> None Delete the sandbox instance asynchronously. - - -#### status - -```python -@async_wrapper("status") -async def status() -> str -``` - -Get current sandbox status asynchronously - #### is\_healthy @@ -1782,7 +1773,9 @@ Build environment variables list from dictionary. 
#### create\_docker\_source ```python -def create_docker_source(image: str, command_args: List[str]) -> DockerSource +def create_docker_source(image: str, + command_args: List[str], + privileged: Optional[bool] = None) -> DockerSource ``` Create Docker source configuration. @@ -1791,6 +1784,7 @@ Create Docker source configuration. - `image` - Docker image name - `command_args` - Command and arguments to run (optional, empty list means use image default) +- `privileged` - If True, run the container in privileged mode (default: None/False) **Returns**: @@ -1866,7 +1860,7 @@ def create_deployment_definition( env_vars: List[DeploymentEnv], instance_type: str, exposed_port_protocol: Optional[str] = None, - regions: List[str] = None, + region: Optional[str] = None, routes: Optional[List[DeploymentRoute]] = None, idle_timeout: Optional[IdleTimeout] = None, light_sleep_enabled: bool = True, @@ -1884,7 +1878,7 @@ Create deployment definition for a sandbox service. - `exposed_port_protocol` - Protocol to expose ports with ("http" or "http2"). If None, defaults to "http". If provided, must be one of "http" or "http2". -- `regions` - List of regions (defaults to ["na"]) +- `region` - Region to deploy to (defaults to "na") - `routes` - List of routes for public access - `idle_timeout` - Idle timeout configuration (see IdleTimeout type) - `light_sleep_enabled` - Whether light sleep is enabled for the instance type (default: True) @@ -2088,6 +2082,16 @@ class SandboxError(Exception) Base exception for sandbox operations + + +## SandboxTimeoutError Objects + +```python +class SandboxTimeoutError(SandboxError) +``` + +Raised when a sandbox operation times out + # koyeb/sandbox.executor\_client