diff --git a/tests/unit/vertexai/genai/replays/test_delete_multimodal_datasets.py b/tests/unit/vertexai/genai/replays/test_delete_multimodal_datasets.py index 37216f94b7..5900d58062 100644 --- a/tests/unit/vertexai/genai/replays/test_delete_multimodal_datasets.py +++ b/tests/unit/vertexai/genai/replays/test_delete_multimodal_datasets.py @@ -39,7 +39,27 @@ def test_delete_dataset(client): name=name, ) assert isinstance(operation, types.MultimodalDatasetOperation) - assert operation + assert operation.done + + +def test_delete_dataset_with_public_method(client): + dataset = client.datasets.create_from_bigquery( + multimodal_dataset={ + "display_name": "test-from-bigquery", + "metadata": { + "inputConfig": { + "bigquerySource": {"uri": f"bq://{BIGQUERY_TABLE_NAME}"}, + }, + }, + } + ) + name = dataset.name.split("/datasets/")[1] + + operation = client.datasets.delete_multimodal_dataset( + name=name, + ) + assert isinstance(operation, types.MultimodalDatasetOperation) + assert operation.done pytestmark = pytest_helper.setup( @@ -52,7 +72,7 @@ def test_delete_dataset(client): @pytest.mark.asyncio async def test_delete_dataset_async(client): - dataset = client.datasets.create_from_bigquery( + dataset = await client.aio.datasets.create_from_bigquery( multimodal_dataset={ "display_name": "test-from-bigquery", "metadata": { @@ -64,8 +84,29 @@ async def test_delete_dataset_async(client): ) name = dataset.name.split("/datasets/")[1] - operation = client.datasets._delete_multimodal_dataset( + operation = await client.aio.datasets._delete_multimodal_dataset( name=name, ) assert isinstance(operation, types.MultimodalDatasetOperation) assert operation + + +@pytest.mark.asyncio +async def test_delete_dataset_with_public_method_async(client): + dataset = await client.aio.datasets.create_from_bigquery( + multimodal_dataset={ + "display_name": "test-from-bigquery", + "metadata": { + "inputConfig": { + "bigquerySource": {"uri": f"bq://{BIGQUERY_TABLE_NAME}"}, + }, + }, + } + ) + name 
= dataset.name.split("/datasets/")[1] + + operation = await client.aio.datasets.delete_multimodal_dataset( + name=name, + ) + assert isinstance(operation, types.MultimodalDatasetOperation) + assert operation.done diff --git a/tests/unit/vertexai/genai/replays/test_get_multimodal_datasets.py b/tests/unit/vertexai/genai/replays/test_get_multimodal_datasets.py index 8897a58039..f5facfe68f 100644 --- a/tests/unit/vertexai/genai/replays/test_get_multimodal_datasets.py +++ b/tests/unit/vertexai/genai/replays/test_get_multimodal_datasets.py @@ -29,7 +29,16 @@ def test_get_dataset(client): ) assert isinstance(dataset, types.MultimodalDataset) assert dataset.name.endswith(DATASET) - assert dataset.display_name == "test-from-bigquery" + assert dataset.display_name == "test-display-name" + + +def test_get_dataset_from_public_method(client): + dataset = client.datasets.get_multimodal_dataset( + name=DATASET, + ) + assert isinstance(dataset, types.MultimodalDataset) + assert dataset.name.endswith(DATASET) + assert dataset.display_name == "test-display-name" pytestmark = pytest_helper.setup( @@ -47,4 +56,14 @@ async def test_get_dataset_async(client): ) assert isinstance(dataset, types.MultimodalDataset) assert dataset.name.endswith(DATASET) - assert dataset.display_name == "test-from-bigquery" + assert dataset.display_name == "test-display-name" + + +@pytest.mark.asyncio +async def test_get_dataset_from_public_method_async(client): + dataset = await client.aio.datasets.get_multimodal_dataset( + name=DATASET, + ) + assert isinstance(dataset, types.MultimodalDataset) + assert dataset.name.endswith(DATASET) + assert dataset.display_name == "test-display-name" diff --git a/tests/unit/vertexai/genai/replays/test_update_multimodal_datasets.py b/tests/unit/vertexai/genai/replays/test_update_multimodal_datasets.py index 0af8d50f89..0c676f9391 100644 --- a/tests/unit/vertexai/genai/replays/test_update_multimodal_datasets.py +++ 
b/tests/unit/vertexai/genai/replays/test_update_multimodal_datasets.py @@ -27,17 +27,37 @@ def test_update_dataset(client): - operation = client.datasets._update_multimodal_dataset( + dataset = client.datasets._update_multimodal_dataset( name=DATASET, - display_name="test-display-name", - description="test-description", + display_name="test-display-name (updated with internal method)", + description="test-description (updated with internal method)", metadata={ "inputConfig": { "bigquerySource": {"uri": f"bq://{BIGQUERY_TABLE_NAME}"}, }, }, ) - assert isinstance(operation, types.MultimodalDatasetOperation) + assert isinstance(dataset, types.MultimodalDataset) + assert dataset.display_name == "test-display-name (updated with internal method)" + assert dataset.description == "test-description (updated with internal method)" + + +def test_update_dataset_with_public_method(client): + dataset = client.datasets.update_multimodal_dataset( + multimodal_dataset={ + "name": DATASET, + "display_name": "test-display-name (updated with public method)", + "description": "test-description (updated with public method)", + "metadata": { + "inputConfig": { + "bigquerySource": {"uri": f"bq://{BIGQUERY_TABLE_NAME}"}, + }, + }, + } + ) + assert isinstance(dataset, types.MultimodalDataset) + assert dataset.display_name == "test-display-name (updated with public method)" + assert dataset.description == "test-description (updated with public method)" pytestmark = pytest_helper.setup( @@ -50,14 +70,35 @@ def test_update_dataset(client): @pytest.mark.asyncio async def test_update_dataset_async(client): - operation = await client.aio.datasets._update_multimodal_dataset( + dataset = await client.aio.datasets._update_multimodal_dataset( name=DATASET, - display_name="test-display-name", + display_name="test-display-name (updated with internal method)", + description="test-description (updated with internal method)", metadata={ "inputConfig": { "bigquerySource": {"uri": 
f"bq://{BIGQUERY_TABLE_NAME}"}, }, }, ) - assert isinstance(operation, types.MultimodalDatasetOperation) - assert operation + assert isinstance(dataset, types.MultimodalDataset) + assert dataset.display_name == "test-display-name (updated with internal method)" + assert dataset.description == "test-description (updated with internal method)" + + +@pytest.mark.asyncio +async def test_update_dataset_with_public_method_async(client): + dataset = await client.aio.datasets.update_multimodal_dataset( + multimodal_dataset={ + "name": DATASET, + "display_name": "test-display-name (updated with public method)", + "description": "test-description (updated with public method)", + "metadata": { + "inputConfig": { + "bigquerySource": {"uri": f"bq://{BIGQUERY_TABLE_NAME}"}, + }, + }, + } + ) + assert isinstance(dataset, types.MultimodalDataset) + assert dataset.display_name == "test-display-name (updated with public method)" + assert dataset.description == "test-description (updated with public method)" diff --git a/vertexai/_genai/datasets.py b/vertexai/_genai/datasets.py index fb4ae6cc6e..4252665d00 100644 --- a/vertexai/_genai/datasets.py +++ b/vertexai/_genai/datasets.py @@ -458,10 +458,10 @@ def _update_multimodal_dataset( config: Optional[types.UpdateMultimodalDatasetConfigOrDict] = None, name: Optional[str] = None, display_name: Optional[str] = None, - metadata: Optional[dict[str, Any]] = None, + metadata: Optional[types.SchemaTablesDatasetMetadataOrDict] = None, description: Optional[str] = None, encryption_spec: Optional[genai_types.EncryptionSpecOrDict] = None, - ) -> types.MultimodalDatasetOperation: + ) -> types.MultimodalDataset: """ Updates a multimodal dataset resource. 
""" @@ -506,7 +506,7 @@ def _update_multimodal_dataset( response_dict = {} if not response.body else json.loads(response.body) - return_value = types.MultimodalDatasetOperation._from_response( + return_value = types.MultimodalDataset._from_response( response=response_dict, kwargs=parameter_model.model_dump() ) @@ -586,11 +586,11 @@ def create_from_bigquery( """Creates a multimodal dataset from a BigQuery table. Args: + multimodal_dataset: + Required. A representation of a multimodal dataset. config: Optional. A configuration for creating the multimodal dataset. If not provided, the default configuration will be used. - multimodal_dataset: - Required. A representation of amultimodal dataset. Returns: A types.MultimodalDataset object representing a multimodal dataset. @@ -619,6 +619,102 @@ def create_from_bigquery( timeout_seconds=config.timeout, ) + def update_multimodal_dataset( + self, + *, + multimodal_dataset: types.MultimodalDatasetOrDict, + config: Optional[types.CreateMultimodalDatasetConfigOrDict] = None, + ) -> types.MultimodalDataset: + """Updates a multimodal dataset. + + Updatable fields include: + - display_name + - description + + Args: + multimodal_dataset: + Required. A representation of a multimodal dataset. + config: + Optional. A configuration for updating the multimodal dataset. If not + provided, the default configuration will be used. + + Returns: + A types.MultimodalDataset object representing the retrieved multimodal + dataset. 
+ """ + if isinstance(multimodal_dataset, dict): + multimodal_dataset = types.MultimodalDataset(**multimodal_dataset) + if not multimodal_dataset.metadata.input_config.bigquery_source.uri.startswith( + "bq://" + ): + multimodal_dataset.metadata.input_config.bigquery_source.uri = ( + f"bq://{multimodal_dataset.metadata.input_config.bigquery_source.uri}" + ) + if isinstance(config, dict): + config = types.CreateMultimodalDatasetConfig(**config) + elif not config: + config = types.CreateMultimodalDatasetConfig() + + return self._update_multimodal_dataset( + config=config, + name=multimodal_dataset.name, + display_name=multimodal_dataset.display_name, + description=multimodal_dataset.description, + metadata=multimodal_dataset.metadata, + ) + + def get_multimodal_dataset( + self, + *, + name: str, + config: Optional[types.CreateMultimodalDatasetConfigOrDict] = None, + ) -> types.MultimodalDataset: + """Gets a multimodal dataset. + + Args: + name: + Required. name of a multimodal dataset. + config: + Optional. A configuration for getting the multimodal dataset. If not + provided, the default configuration will be used. + + Returns: + A types.MultimodalDataset object representing the retrieved multimodal + dataset. + """ + if isinstance(config, dict): + config = types.CreateMultimodalDatasetConfig(**config) + elif not config: + config = types.CreateMultimodalDatasetConfig() + + return self._get_multimodal_dataset(config=config, name=name) + + def delete_multimodal_dataset( + self, + *, + name: str, + config: Optional[types.CreateMultimodalDatasetConfigOrDict] = None, + ) -> types.MultimodalDatasetOperation: + """Deletes a multimodal dataset. + + Args: + name: + Required. name of a multimodal dataset. + config: + Optional. A configuration for deleting the multimodal dataset. If not + provided, the default configuration will be used. + + Returns: + A types.MultimodalDatasetOperation object representing the delete + multimodal dataset operation. 
+ """ + if isinstance(config, dict): + config = types.CreateMultimodalDatasetConfig(**config) + elif not config: + config = types.CreateMultimodalDatasetConfig() + + return self._delete_multimodal_dataset(config=config, name=name) + class AsyncDatasets(_api_module.BaseModule): @@ -916,10 +1012,10 @@ async def _update_multimodal_dataset( config: Optional[types.UpdateMultimodalDatasetConfigOrDict] = None, name: Optional[str] = None, display_name: Optional[str] = None, - metadata: Optional[dict[str, Any]] = None, + metadata: Optional[types.SchemaTablesDatasetMetadataOrDict] = None, description: Optional[str] = None, encryption_spec: Optional[genai_types.EncryptionSpecOrDict] = None, - ) -> types.MultimodalDatasetOperation: + ) -> types.MultimodalDataset: """ Updates a multimodal dataset resource. """ @@ -966,7 +1062,7 @@ async def _update_multimodal_dataset( response_dict = {} if not response.body else json.loads(response.body) - return_value = types.MultimodalDatasetOperation._from_response( + return_value = types.MultimodalDataset._from_response( response=response_dict, kwargs=parameter_model.model_dump() ) @@ -1046,11 +1142,11 @@ async def create_from_bigquery( """Creates a multimodal dataset from a BigQuery table. Args: + multimodal_dataset: + Required. A representation of a multimodal dataset. config: Optional. A configuration for creating the multimodal dataset. If not provided, the default configuration will be used. - multimodal_dataset: - Required. A representation of a multimodal dataset. Returns: A types.MultimodalDataset object representing a multimodal dataset. @@ -1078,3 +1174,95 @@ async def create_from_bigquery( operation=multimodal_dataset_operation, timeout_seconds=config.timeout, ) + + async def update_multimodal_dataset( + self, + *, + multimodal_dataset: types.MultimodalDatasetOrDict, + config: Optional[types.CreateMultimodalDatasetConfigOrDict] = None, + ) -> types.MultimodalDataset: + """Updates a multimodal dataset. 
+ + Args: + multimodal_dataset: + Required. A representation of a multimodal dataset. + config: + Optional. A configuration for updating the multimodal dataset. If not + provided, the default configuration will be used. + + Returns: + A types.MultimodalDataset object representing the updated multimodal + dataset. + """ + if isinstance(multimodal_dataset, dict): + multimodal_dataset = types.MultimodalDataset(**multimodal_dataset) + if not multimodal_dataset.metadata.input_config.bigquery_source.uri.startswith( + "bq://" + ): + multimodal_dataset.metadata.input_config.bigquery_source.uri = ( + f"bq://{multimodal_dataset.metadata.input_config.bigquery_source.uri}" + ) + if isinstance(config, dict): + config = types.CreateMultimodalDatasetConfig(**config) + elif not config: + config = types.CreateMultimodalDatasetConfig() + + return await self._update_multimodal_dataset( + config=config, + name=multimodal_dataset.name, + display_name=multimodal_dataset.display_name, + description=multimodal_dataset.description, + metadata=multimodal_dataset.metadata, + ) + + async def get_multimodal_dataset( + self, + *, + name: str, + config: Optional[types.CreateMultimodalDatasetConfigOrDict] = None, + ) -> types.MultimodalDataset: + """Gets a multimodal dataset. + + Args: + name: + Required. name of a multimodal dataset. + config: + Optional. A configuration for getting the multimodal dataset. If not + provided, the default configuration will be used. + + Returns: + A types.MultimodalDataset object representing the retrieved multimodal + dataset. + """ + if isinstance(config, dict): + config = types.CreateMultimodalDatasetConfig(**config) + elif not config: + config = types.CreateMultimodalDatasetConfig() + + return await self._get_multimodal_dataset(config=config, name=name) + + async def delete_multimodal_dataset( + self, + *, + name: str, + config: Optional[types.CreateMultimodalDatasetConfigOrDict] = None, + ) -> types.MultimodalDatasetOperation: + """Deletes a multimodal dataset. 
+ + Args: + name: + Required. name of a multimodal dataset. + config: + Optional. A configuration for deleting the multimodal dataset. If not + provided, the default configuration will be used. + + Returns: + A types.MultimodalDatasetOperation object representing the delete + multimodal dataset operation. + """ + if isinstance(config, dict): + config = types.CreateMultimodalDatasetConfig(**config) + elif not config: + config = types.CreateMultimodalDatasetConfig() + + return await self._delete_multimodal_dataset(config=config, name=name) diff --git a/vertexai/_genai/types/common.py b/vertexai/_genai/types/common.py index d2285eb195..88fd1a6ac8 100644 --- a/vertexai/_genai/types/common.py +++ b/vertexai/_genai/types/common.py @@ -10255,7 +10255,9 @@ class _UpdateMultimodalDatasetParameters(_common.BaseModel): ) name: Optional[str] = Field(default=None, description="""""") display_name: Optional[str] = Field(default=None, description="""""") - metadata: Optional[dict[str, Any]] = Field(default=None, description="""""") + metadata: Optional[SchemaTablesDatasetMetadata] = Field( + default=None, description="""""" + ) description: Optional[str] = Field(default=None, description="""""") encryption_spec: Optional[genai_types.EncryptionSpec] = Field( default=None, description="""""" @@ -10274,7 +10276,7 @@ class _UpdateMultimodalDatasetParametersDict(TypedDict, total=False): display_name: Optional[str] """""" - metadata: Optional[dict[str, Any]] + metadata: Optional[SchemaTablesDatasetMetadataDict] """""" description: Optional[str]