diff --git a/tests/unit/vertexai/genai/replays/test_create_multimodal.py b/tests/unit/vertexai/genai/replays/test_create_multimodal.py index e9def4153f..504544675d 100644 --- a/tests/unit/vertexai/genai/replays/test_create_multimodal.py +++ b/tests/unit/vertexai/genai/replays/test_create_multimodal.py @@ -26,7 +26,7 @@ def test_create_dataset(client): - create_dataset_operation = client.multimodal._create_multimodal_dataset( + create_dataset_operation = client.datasets._create_multimodal_dataset( name="projects/vertex-sdk-dev/locations/us-central1", display_name="test-display-name", metadata_schema_uri=METADATA_SCHEMA_URI, @@ -50,7 +50,7 @@ def test_create_dataset(client): @pytest.mark.asyncio async def test_create_dataset_async(client): - create_dataset_operation = await client.aio.multimodal._create_multimodal_dataset( + create_dataset_operation = await client.aio.datasets._create_multimodal_dataset( name="projects/vertex-sdk-dev/locations/us-central1", display_name="test-display-name", metadata_schema_uri=METADATA_SCHEMA_URI, diff --git a/tests/unit/vertexai/genai/replays/test_create_multimodal_datasets.py b/tests/unit/vertexai/genai/replays/test_create_multimodal_datasets.py new file mode 100644 index 0000000000..267dd5c629 --- /dev/null +++ b/tests/unit/vertexai/genai/replays/test_create_multimodal_datasets.py @@ -0,0 +1,100 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# pylint: disable=protected-access,bad-continuation,missing-function-docstring + +from tests.unit.vertexai.genai.replays import pytest_helper +from vertexai._genai import types + +import pytest + +METADATA_SCHEMA_URI = ( + "gs://google-cloud-aiplatform/schema/dataset/metadata/multimodal_1.0.0.yaml" +) +BIGQUERY_TABLE_NAME = "vertex-sdk-dev.multimodal_dataset.test-table" + + +def test_create_dataset(client): + create_dataset_operation = client.datasets._create_multimodal_dataset( + name="projects/vertex-sdk-dev/locations/us-central1", + display_name="test-display-name", + metadata_schema_uri=METADATA_SCHEMA_URI, + metadata={ + "inputConfig": { + "bigquerySource": {"uri": f"bq://{BIGQUERY_TABLE_NAME}"}, + }, + }, + ) + assert isinstance(create_dataset_operation, types.MultimodalDatasetOperation) + assert create_dataset_operation + + +def test_create_dataset_from_bigquery(client): + dataset = client.datasets.create_multimodal_dataset_from_bigquery( + multimodal_dataset=types.MultimodalDataset( + display_name="test-from-bigquery", + bigquery_uri=BIGQUERY_TABLE_NAME, + ) + ) + assert isinstance(dataset, types.MultimodalDataset) + assert dataset.display_name == "test-from-bigquery" + + +pytestmark = pytest_helper.setup( + file=__file__, + globals_for_file=globals(), +) + +pytest_plugins = ("pytest_asyncio",) + + +@pytest.mark.asyncio +async def test_create_dataset_async(client): + create_dataset_operation = await client.aio.datasets._create_multimodal_dataset( + name="projects/vertex-sdk-dev/locations/us-central1", + display_name="test-display-name", + metadata_schema_uri=METADATA_SCHEMA_URI, + metadata={ + "inputConfig": { + "bigquerySource": {"uri": f"bq://{BIGQUERY_TABLE_NAME}"}, + }, + }, + ) + assert isinstance(create_dataset_operation, types.MultimodalDatasetOperation) + assert create_dataset_operation + + +@pytest.mark.asyncio +async def test_create_dataset_from_bigquery_async(client): + dataset = await 
client.aio.datasets.create_multimodal_dataset_from_bigquery( + multimodal_dataset=types.MultimodalDataset( + display_name="test-from-bigquery", + bigquery_uri=BIGQUERY_TABLE_NAME, + ) + ) + assert isinstance(dataset, types.MultimodalDataset) + assert dataset.display_name == "test-from-bigquery" + + +@pytest.mark.asyncio +async def test_create_dataset_from_bigquery_async_with_timeout(client): + dataset = await client.aio.datasets.create_multimodal_dataset_from_bigquery( + config=types.CreateMultimodalDatasetConfig(timeout=120), + multimodal_dataset=types.MultimodalDataset( + display_name="test-from-bigquery", + bigquery_uri=BIGQUERY_TABLE_NAME, + ), + ) + assert isinstance(dataset, types.MultimodalDataset) + assert dataset.display_name == "test-from-bigquery" diff --git a/vertexai/_genai/_datasets_utils.py b/vertexai/_genai/_datasets_utils.py new file mode 100644 index 0000000000..764d6b7a6d --- /dev/null +++ b/vertexai/_genai/_datasets_utils.py @@ -0,0 +1,20 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +"""Utility functions for multimodal dataset.""" + + +METADATA_SCHEMA_URI = ( + "gs://google-cloud-aiplatform/schema/dataset/metadata/multimodal_1.0.0.yaml" +) diff --git a/vertexai/_genai/client.py b/vertexai/_genai/client.py index c8a53d2350..3418ff7c5a 100644 --- a/vertexai/_genai/client.py +++ b/vertexai/_genai/client.py @@ -55,7 +55,7 @@ def __init__(self, api_client: genai_client.Client): self._agent_engines = None self._prompt_optimizer = None self._prompts = None - self._multimodal = None + self._datasets = None @property @_common.experimental_warning( @@ -121,16 +121,16 @@ def prompts(self): @property @_common.experimental_warning( - "The Vertex SDK GenAI async multimodal module is experimental, " + "The Vertex SDK GenAI async datasets module is experimental, " "and may change in future versions." ) - def multimodal(self): - if self._multimodal is None: - self._multimodal = importlib.import_module( - ".multimodal", + def datasets(self): + if self._datasets is None: + self._datasets = importlib.import_module( + ".datasets", __package__, ) - return self._multimodal.AsyncMultimodal(self._api_client) + return self._datasets.AsyncDatasets(self._api_client) class Client: @@ -192,7 +192,7 @@ def __init__( self._prompt_optimizer = None self._agent_engines = None self._prompts = None - self._multimodal = None + self._datasets = None @property def evals(self) -> Any: @@ -282,13 +282,13 @@ def prompts(self): @property @_common.experimental_warning( - "The Vertex SDK GenAI multimodal module is experimental, " + "The Vertex SDK GenAI datasets module is experimental, " "and may change in future versions." 
) - def multimodal(self): - if self._multimodal is None: - self._multimodal = importlib.import_module( - ".multimodal", + def datasets(self): + if self._datasets is None: + self._datasets = importlib.import_module( + ".datasets", __package__, ) - return self._multimodal.Multimodal(self._api_client) + return self._datasets.Datasets(self._api_client) diff --git a/vertexai/_genai/datasets.py b/vertexai/_genai/datasets.py new file mode 100644 index 0000000000..ecb39d6fc5 --- /dev/null +++ b/vertexai/_genai/datasets.py @@ -0,0 +1,545 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# Code generated by the Google Gen AI SDK generator DO NOT EDIT. + +import asyncio +import json +import logging +import time +from typing import Any, Optional, Union +from urllib.parse import urlencode + +from google.genai import _api_module +from google.genai import _common +from google.genai import types as genai_types +from google.genai._common import get_value_by_path as getv +from google.genai._common import set_value_by_path as setv + +from . import _datasets_utils +from . 
import types + + +logger = logging.getLogger("vertexai_genai.datasets") + + +def _CreateMultimodalDatasetParameters_to_vertex( + from_object: Union[dict[str, Any], object], + parent_object: Optional[dict[str, Any]] = None, +) -> dict[str, Any]: + to_object: dict[str, Any] = {} + if getv(from_object, ["config"]) is not None: + setv(to_object, ["config"], getv(from_object, ["config"])) + + if getv(from_object, ["name"]) is not None: + setv(to_object, ["name"], getv(from_object, ["name"])) + + if getv(from_object, ["display_name"]) is not None: + setv(to_object, ["displayName"], getv(from_object, ["display_name"])) + + if getv(from_object, ["metadata_schema_uri"]) is not None: + setv( + to_object, ["metadataSchemaUri"], getv(from_object, ["metadata_schema_uri"]) + ) + + if getv(from_object, ["metadata"]) is not None: + setv(to_object, ["metadata"], getv(from_object, ["metadata"])) + + if getv(from_object, ["description"]) is not None: + setv(to_object, ["description"], getv(from_object, ["description"])) + + if getv(from_object, ["encryption_spec"]) is not None: + setv(to_object, ["encryptionSpec"], getv(from_object, ["encryption_spec"])) + + return to_object + + +def _GetMultimodalDatasetOperationParameters_to_vertex( + from_object: Union[dict[str, Any], object], + parent_object: Optional[dict[str, Any]] = None, +) -> dict[str, Any]: + to_object: dict[str, Any] = {} + if getv(from_object, ["config"]) is not None: + setv(to_object, ["config"], getv(from_object, ["config"])) + + if getv(from_object, ["dataset_id"]) is not None: + setv(to_object, ["_url", "dataset_id"], getv(from_object, ["dataset_id"])) + + if getv(from_object, ["operation_id"]) is not None: + setv(to_object, ["_url", "operation_id"], getv(from_object, ["operation_id"])) + + return to_object + + +class Datasets(_api_module.BaseModule): + + def _create_multimodal_dataset( + self, + *, + config: Optional[types.CreateMultimodalDatasetConfigOrDict] = None, + name: Optional[str] = None, + display_name: 
Optional[str] = None, + metadata_schema_uri: Optional[str] = None, + metadata: Optional[types.SchemaTablesDatasetMetadataOrDict] = None, + description: Optional[str] = None, + encryption_spec: Optional[genai_types.EncryptionSpecOrDict] = None, + ) -> types.MultimodalDatasetOperation: + """ + Creates a dataset resource to store multimodal datasets. + """ + + parameter_model = types._CreateMultimodalDatasetParameters( + config=config, + name=name, + display_name=display_name, + metadata_schema_uri=metadata_schema_uri, + metadata=metadata, + description=description, + encryption_spec=encryption_spec, + ) + + request_url_dict: Optional[dict[str, str]] + if not self._api_client.vertexai: + raise ValueError("This method is only supported in the Vertex AI client.") + else: + request_dict = _CreateMultimodalDatasetParameters_to_vertex(parameter_model) + request_url_dict = request_dict.get("_url") + if request_url_dict: + path = "datasets".format_map(request_url_dict) + else: + path = "datasets" + + query_params = request_dict.get("_query") + if query_params: + path = f"{path}?{urlencode(query_params)}" + # TODO: remove the hack that pops config. 
+ request_dict.pop("config", None) + + http_options: Optional[types.HttpOptions] = None + if ( + parameter_model.config is not None + and parameter_model.config.http_options is not None + ): + http_options = parameter_model.config.http_options + + request_dict = _common.convert_to_dict(request_dict) + request_dict = _common.encode_unserializable_types(request_dict) + + response = self._api_client.request("post", path, request_dict, http_options) + + response_dict = {} if not response.body else json.loads(response.body) + + return_value = types.MultimodalDatasetOperation._from_response( + response=response_dict, kwargs=parameter_model.model_dump() + ) + + self._api_client._verify_response(return_value) + return return_value + + def _get_multimodal_dataset_operation( + self, + *, + config: Optional[types.GetMultimodalDatasetOperationConfigOrDict] = None, + dataset_id: Optional[str] = None, + operation_id: Optional[str] = None, + ) -> types.MultimodalDatasetOperation: + """ + Gets the operation from creating a multimodal dataset. + """ + + parameter_model = types._GetMultimodalDatasetOperationParameters( + config=config, + dataset_id=dataset_id, + operation_id=operation_id, + ) + + request_url_dict: Optional[dict[str, str]] + if not self._api_client.vertexai: + raise ValueError("This method is only supported in the Vertex AI client.") + else: + request_dict = _GetMultimodalDatasetOperationParameters_to_vertex( + parameter_model + ) + request_url_dict = request_dict.get("_url") + if request_url_dict: + path = "datasets/{dataset_id}/operations/{operation_id}".format_map( + request_url_dict + ) + else: + path = "datasets/{dataset_id}/operations/{operation_id}" + + query_params = request_dict.get("_query") + if query_params: + path = f"{path}?{urlencode(query_params)}" + # TODO: remove the hack that pops config. 
+ request_dict.pop("config", None) + + http_options: Optional[types.HttpOptions] = None + if ( + parameter_model.config is not None + and parameter_model.config.http_options is not None + ): + http_options = parameter_model.config.http_options + + request_dict = _common.convert_to_dict(request_dict) + request_dict = _common.encode_unserializable_types(request_dict) + + response = self._api_client.request("get", path, request_dict, http_options) + + response_dict = {} if not response.body else json.loads(response.body) + + return_value = types.MultimodalDatasetOperation._from_response( + response=response_dict, kwargs=parameter_model.model_dump() + ) + + self._api_client._verify_response(return_value) + return return_value + + def _wait_for_operation( + self, + operation: types.MultimodalDatasetOperation, + timeout_seconds: int, + ) -> types.MultimodalDataset: + """Waits for a multimodal dataset operation to complete. + + Args: + operation: The multimodal dataset operation to wait for. + timeout_seconds: The maximum time in seconds to wait for the operation + to complete. + + Returns: + The name of the Multimodal Dataset resource from the operation result. + + Raises: + TimeoutError: If the operation does not complete within the timeout. + ValueError: If the operation fails. 
+ """ + multimodal_operation: Optional[types.MultimodalDatasetOperation] = None + + response_operation_name = operation.name + dataset_id = response_operation_name.split("/datasets/")[1].split("/")[0] + operation_id = response_operation_name.split("/")[-1] + + start_time = time.time() + sleep_duration_seconds = 5 + wait_multiplier = 2 + max_wait_time_seconds = 60 + + while (time.time() - start_time) < timeout_seconds: + multimodal_operation = self._get_multimodal_dataset_operation( + dataset_id=dataset_id, + operation_id=operation_id, + ) + if multimodal_operation.done: + break + time.sleep(sleep_duration_seconds) + sleep_duration_seconds = min( + sleep_duration_seconds * wait_multiplier, max_wait_time_seconds + ) + else: + raise TimeoutError( + "Create multimodal dataset operation did not complete within the" + f" specified timeout of {timeout_seconds} seconds." + ) + if ( + not multimodal_operation + or multimodal_operation.response is None + or multimodal_operation.response.name is None + ): + logger.error( + f"Error creating multimodal dataset resource for the operation {operation.name}." + ) + raise ValueError("Error creating multimodal dataset resource.") + if ( + hasattr(multimodal_operation, "error") + and multimodal_operation.error is not None + ): + raise ValueError( + f"Error creating multimodal dataset resource: {multimodal_operation.error}" + ) + return multimodal_operation.response + + def create_multimodal_dataset_from_bigquery( + self, + *, + multimodal_dataset: types.MultimodalDataset, + config: Optional[types.CreateMultimodalDatasetConfigOrDict] = None, + ) -> types.MultimodalDataset: + """Creates a multimodal dataset from a BigQuery table. + + Args: + config: + Optional. A configuration for creating the multimodal dataset. If not + provided, the default configuration will be used. + multimodal_dataset: + Required. A representation of amultimodal dataset. + + Returns: + A types.MultimodalDataset object representing a multimodal dataset. 
+ """ + if not multimodal_dataset.bigquery_uri.startswith("bq://"): + multimodal_dataset.bigquery_uri = f"bq://{multimodal_dataset.bigquery_uri}" + if isinstance(config, dict): + config = types.CreateMultimodalDatasetConfig(**config) + elif not config: + config = types.CreateMultimodalDatasetConfig() + + multimodal_dataset_operation = self._create_multimodal_dataset( + config=config, + display_name=multimodal_dataset.display_name, + metadata_schema_uri=_datasets_utils.METADATA_SCHEMA_URI, + metadata={ + "inputConfig": { + "bigquerySource": {"uri": multimodal_dataset.bigquery_uri}, + }, + }, + ) + return self._wait_for_operation( + operation=multimodal_dataset_operation, + timeout_seconds=config.timeout, + ) + + +class AsyncDatasets(_api_module.BaseModule): + + async def _create_multimodal_dataset( + self, + *, + config: Optional[types.CreateMultimodalDatasetConfigOrDict] = None, + name: Optional[str] = None, + display_name: Optional[str] = None, + metadata_schema_uri: Optional[str] = None, + metadata: Optional[types.SchemaTablesDatasetMetadataOrDict] = None, + description: Optional[str] = None, + encryption_spec: Optional[genai_types.EncryptionSpecOrDict] = None, + ) -> types.MultimodalDatasetOperation: + """ + Creates a dataset resource to store multimodal datasets. 
+ """ + + parameter_model = types._CreateMultimodalDatasetParameters( + config=config, + name=name, + display_name=display_name, + metadata_schema_uri=metadata_schema_uri, + metadata=metadata, + description=description, + encryption_spec=encryption_spec, + ) + + request_url_dict: Optional[dict[str, str]] + if not self._api_client.vertexai: + raise ValueError("This method is only supported in the Vertex AI client.") + else: + request_dict = _CreateMultimodalDatasetParameters_to_vertex(parameter_model) + request_url_dict = request_dict.get("_url") + if request_url_dict: + path = "datasets".format_map(request_url_dict) + else: + path = "datasets" + + query_params = request_dict.get("_query") + if query_params: + path = f"{path}?{urlencode(query_params)}" + # TODO: remove the hack that pops config. + request_dict.pop("config", None) + + http_options: Optional[types.HttpOptions] = None + if ( + parameter_model.config is not None + and parameter_model.config.http_options is not None + ): + http_options = parameter_model.config.http_options + + request_dict = _common.convert_to_dict(request_dict) + request_dict = _common.encode_unserializable_types(request_dict) + + response = await self._api_client.async_request( + "post", path, request_dict, http_options + ) + + response_dict = {} if not response.body else json.loads(response.body) + + return_value = types.MultimodalDatasetOperation._from_response( + response=response_dict, kwargs=parameter_model.model_dump() + ) + + self._api_client._verify_response(return_value) + return return_value + + async def _get_multimodal_dataset_operation( + self, + *, + config: Optional[types.GetMultimodalDatasetOperationConfigOrDict] = None, + dataset_id: Optional[str] = None, + operation_id: Optional[str] = None, + ) -> types.MultimodalDatasetOperation: + """ + Gets the operation from creating a multimodal dataset. 
+ """ + + parameter_model = types._GetMultimodalDatasetOperationParameters( + config=config, + dataset_id=dataset_id, + operation_id=operation_id, + ) + + request_url_dict: Optional[dict[str, str]] + if not self._api_client.vertexai: + raise ValueError("This method is only supported in the Vertex AI client.") + else: + request_dict = _GetMultimodalDatasetOperationParameters_to_vertex( + parameter_model + ) + request_url_dict = request_dict.get("_url") + if request_url_dict: + path = "datasets/{dataset_id}/operations/{operation_id}".format_map( + request_url_dict + ) + else: + path = "datasets/{dataset_id}/operations/{operation_id}" + + query_params = request_dict.get("_query") + if query_params: + path = f"{path}?{urlencode(query_params)}" + # TODO: remove the hack that pops config. + request_dict.pop("config", None) + + http_options: Optional[types.HttpOptions] = None + if ( + parameter_model.config is not None + and parameter_model.config.http_options is not None + ): + http_options = parameter_model.config.http_options + + request_dict = _common.convert_to_dict(request_dict) + request_dict = _common.encode_unserializable_types(request_dict) + + response = await self._api_client.async_request( + "get", path, request_dict, http_options + ) + + response_dict = {} if not response.body else json.loads(response.body) + + return_value = types.MultimodalDatasetOperation._from_response( + response=response_dict, kwargs=parameter_model.model_dump() + ) + + self._api_client._verify_response(return_value) + return return_value + + async def _wait_for_operation( + self, + operation: types.MultimodalDatasetOperation, + timeout_seconds: int, + ) -> types.MultimodalDataset: + """Waits for a multimodal dataset operation to complete. + + Args: + operation: The multimodal dataset operation to wait for. + timeout_seconds: The maximum time in seconds to wait for the operation + to complete. + + Returns: + The name of the Multimodal Dataset resource from the operation result. 
+ + Raises: + TimeoutError: If the operation does not complete within the timeout. + ValueError: If the operation fails. + """ + multimodal_operation: Optional[types.MultimodalDatasetOperation] = None + + response_operation_name = operation.name + dataset_id = response_operation_name.split("/datasets/")[1].split("/")[0] + operation_id = response_operation_name.split("/")[-1] + + start_time = time.time() + sleep_duration_seconds = 5 + wait_multiplier = 2 + max_wait_time_seconds = 60 + + while (time.time() - start_time) < timeout_seconds: + multimodal_operation = await self._get_multimodal_dataset_operation( + dataset_id=dataset_id, + operation_id=operation_id, + ) + if multimodal_operation.done: + break + await asyncio.sleep(sleep_duration_seconds) + sleep_duration_seconds = min( + sleep_duration_seconds * wait_multiplier, max_wait_time_seconds + ) + else: + raise TimeoutError( + "Create multimodal dataset operation did not complete within the" + f" specified timeout of {timeout_seconds} seconds." + ) + if ( + not multimodal_operation + or multimodal_operation.response is None + or multimodal_operation.response.name is None + ): + logger.error( + f"Error creating multimodal dataset resource for the operation {operation.name}." + ) + raise ValueError("Error creating multimodal dataset resource.") + if ( + hasattr(multimodal_operation, "error") + and multimodal_operation.error is not None + ): + raise ValueError( + f"Error creating multimodal dataset resource: {multimodal_operation.error}" + ) + return multimodal_operation.response + + async def create_multimodal_dataset_from_bigquery( + self, + *, + multimodal_dataset: types.MultimodalDataset, + config: Optional[types.CreateMultimodalDatasetConfigOrDict] = None, + ) -> types.MultimodalDataset: + """Creates a multimodal dataset from a BigQuery table. + + Args: + config: + Optional. A configuration for creating the multimodal dataset. If not + provided, the default configuration will be used. 
multimodal_dataset:
+                Required. A representation of a multimodal dataset.
+
+        Returns:
+            A types.MultimodalDataset object representing a multimodal dataset.
+        """
+        if not multimodal_dataset.bigquery_uri.startswith("bq://"):
+            multimodal_dataset.bigquery_uri = f"bq://{multimodal_dataset.bigquery_uri}"
+        if isinstance(config, dict):
+            config = types.CreateMultimodalDatasetConfig(**config)
+        elif not config:
+            config = types.CreateMultimodalDatasetConfig()
+
+        multimodal_dataset_operation = await self._create_multimodal_dataset(
+            config=config,
+            display_name=multimodal_dataset.display_name,
+            metadata_schema_uri=_datasets_utils.METADATA_SCHEMA_URI,
+            metadata={
+                "inputConfig": {
+                    "bigquerySource": {"uri": multimodal_dataset.bigquery_uri},
+                },
+            },
+        )
+        return await self._wait_for_operation(
+            operation=multimodal_dataset_operation,
+            timeout_seconds=config.timeout,
+        )
diff --git a/vertexai/_genai/types.py b/vertexai/_genai/types.py
index a81b396f6e..e22d424c93 100644
--- a/vertexai/_genai/types.py
+++ b/vertexai/_genai/types.py
@@ -9697,6 +9697,11 @@ class CreateMultimodalDatasetConfig(_common.BaseModel):
     http_options: Optional[genai_types.HttpOptions] = Field(
         default=None, description="""Used to override HTTP request options."""
     )
+    timeout: Optional[int] = Field(
+        default=90,
+        description="""The timeout for the create dataset request in seconds. If not set,
+        the default timeout is 90 seconds.""",
+    )
 
 
 class CreateMultimodalDatasetConfigDict(TypedDict, total=False):
@@ -9705,6 +9710,10 @@ class CreateMultimodalDatasetConfigDict(TypedDict, total=False):
     http_options: Optional[genai_types.HttpOptionsDict]
     """Used to override HTTP request options."""
 
+    timeout: Optional[int]
+    """The timeout for the create dataset request in seconds.
If not set, + the default timeout is 90 seconds.""" + CreateMultimodalDatasetConfigOrDict = Union[ CreateMultimodalDatasetConfig, CreateMultimodalDatasetConfigDict @@ -9821,6 +9830,36 @@ class _CreateMultimodalDatasetParametersDict(TypedDict, total=False): ] +class MultimodalDataset(_common.BaseModel): + """Represents a multimodal dataset.""" + + name: Optional[str] = Field( + default=None, description="""The ID of the multimodal dataset.""" + ) + display_name: Optional[str] = Field( + default=None, description="""The display name of the multimodal dataset.""" + ) + bigquery_uri: Optional[str] = Field( + default=None, description="""The BigQuery URI of the multimodal dataset.""" + ) + + +class MultimodalDatasetDict(TypedDict, total=False): + """Represents a multimodal dataset.""" + + name: Optional[str] + """The ID of the multimodal dataset.""" + + display_name: Optional[str] + """The display name of the multimodal dataset.""" + + bigquery_uri: Optional[str] + """The BigQuery URI of the multimodal dataset.""" + + +MultimodalDatasetOrDict = Union[MultimodalDataset, MultimodalDatasetDict] + + class MultimodalDatasetOperation(_common.BaseModel): """Represents the create dataset operation.""" @@ -9840,7 +9879,7 @@ class MultimodalDatasetOperation(_common.BaseModel): default=None, description="""The error result of the operation in case of failure or cancellation.""", ) - response: Optional[dict[str, Any]] = Field( + response: Optional[MultimodalDataset] = Field( default=None, description="""The result of the dataset operation.""" ) @@ -9860,7 +9899,7 @@ class MultimodalDatasetOperationDict(TypedDict, total=False): error: Optional[dict[str, Any]] """The error result of the operation in case of failure or cancellation.""" - response: Optional[dict[str, Any]] + response: Optional[MultimodalDatasetDict] """The result of the dataset operation.""" @@ -9869,6 +9908,55 @@ class MultimodalDatasetOperationDict(TypedDict, total=False): ] +class 
GetMultimodalDatasetOperationConfig(_common.BaseModel): + """Config for getting a multimodal dataset operation.""" + + http_options: Optional[genai_types.HttpOptions] = Field( + default=None, description="""Used to override HTTP request options.""" + ) + + +class GetMultimodalDatasetOperationConfigDict(TypedDict, total=False): + """Config for getting a multimodal dataset operation.""" + + http_options: Optional[genai_types.HttpOptionsDict] + """Used to override HTTP request options.""" + + +GetMultimodalDatasetOperationConfigOrDict = Union[ + GetMultimodalDatasetOperationConfig, GetMultimodalDatasetOperationConfigDict +] + + +class _GetMultimodalDatasetOperationParameters(_common.BaseModel): + """Parameters for getting a dataset operation.""" + + config: Optional[GetMultimodalDatasetOperationConfig] = Field( + default=None, description="""""" + ) + dataset_id: Optional[str] = Field(default=None, description="""""") + operation_id: Optional[str] = Field(default=None, description="""""") + + +class _GetMultimodalDatasetOperationParametersDict(TypedDict, total=False): + """Parameters for getting a dataset operation.""" + + config: Optional[GetMultimodalDatasetOperationConfigDict] + """""" + + dataset_id: Optional[str] + """""" + + operation_id: Optional[str] + """""" + + +_GetMultimodalDatasetOperationParametersOrDict = Union[ + _GetMultimodalDatasetOperationParameters, + _GetMultimodalDatasetOperationParametersDict, +] + + class CreateDatasetConfig(_common.BaseModel): """Config for creating a dataset resource to store prompts."""