From 2f59484528bc975d6af5e4afebfbac8dadd41edf Mon Sep 17 00:00:00 2001
From: Sara Robinson
Date: Mon, 17 Nov 2025 07:05:46 -0800
Subject: [PATCH] chore: GenAI SDK client - fix mypy errors in datasets module

PiperOrigin-RevId: 833324036
---
 vertexai/_genai/datasets.py | 100 ++++++++++++++++++++++++++++++++++++
 1 file changed, 100 insertions(+)

diff --git a/vertexai/_genai/datasets.py b/vertexai/_genai/datasets.py
index 6bf2967a9d..0f3cf69594 100644
--- a/vertexai/_genai/datasets.py
+++ b/vertexai/_genai/datasets.py
@@ -611,6 +611,8 @@ def _wait_for_operation(
             ValueError: If the operation fails.
         """
         response_operation_name = operation.name
+        if response_operation_name is None:
+            raise ValueError("Dataset operation name is empty.")
         dataset_id = response_operation_name.split("/datasets/")[1].split("/")[0]
         operation_id = response_operation_name.split("/")[-1]
 
@@ -662,6 +664,30 @@ def create_from_bigquery(
         """
         if isinstance(multimodal_dataset, dict):
             multimodal_dataset = types.MultimodalDataset(**multimodal_dataset)
+        if (
+            not hasattr(multimodal_dataset, "metadata")
+            or multimodal_dataset.metadata is None
+        ):
+            raise ValueError("Multimodal dataset metadata is required.")
+        if (
+            not hasattr(multimodal_dataset.metadata, "input_config")
+            or multimodal_dataset.metadata.input_config is None
+        ):
+            raise ValueError("Multimodal dataset input config is required.")
+        if (
+            not hasattr(multimodal_dataset.metadata.input_config, "bigquery_source")
+            or multimodal_dataset.metadata.input_config.bigquery_source is None
+        ):
+            raise ValueError(
+                "Multimodal dataset input config bigquery source is required."
+            )
+        if (
+            not hasattr(multimodal_dataset.metadata.input_config.bigquery_source, "uri")
+            or multimodal_dataset.metadata.input_config.bigquery_source.uri is None
+        ):
+            raise ValueError(
+                "Multimodal dataset input config bigquery source uri is required."
+            )
         if not multimodal_dataset.metadata.input_config.bigquery_source.uri.startswith(
             "bq://"
         ):
@@ -710,6 +736,30 @@ def update_multimodal_dataset(
         """
         if isinstance(multimodal_dataset, dict):
             multimodal_dataset = types.MultimodalDataset(**multimodal_dataset)
+        if (
+            not hasattr(multimodal_dataset, "metadata")
+            or multimodal_dataset.metadata is None
+        ):
+            raise ValueError("Multimodal dataset metadata is required.")
+        if (
+            not hasattr(multimodal_dataset.metadata, "input_config")
+            or multimodal_dataset.metadata.input_config is None
+        ):
+            raise ValueError("Multimodal dataset input config is required.")
+        if (
+            not hasattr(multimodal_dataset.metadata.input_config, "bigquery_source")
+            or multimodal_dataset.metadata.input_config.bigquery_source is None
+        ):
+            raise ValueError(
+                "Multimodal dataset input config bigquery source is required."
+            )
+        if (
+            not hasattr(multimodal_dataset.metadata.input_config.bigquery_source, "uri")
+            or multimodal_dataset.metadata.input_config.bigquery_source.uri is None
+        ):
+            raise ValueError(
+                "Multimodal dataset input config bigquery source uri is required."
+            )
         if not multimodal_dataset.metadata.input_config.bigquery_source.uri.startswith(
             "bq://"
         ):
@@ -1258,6 +1308,8 @@ async def _wait_for_operation(
             ValueError: If the operation fails.
         """
         response_operation_name = operation.name
+        if response_operation_name is None:
+            raise ValueError("Dataset operation name is empty.")
         dataset_id = response_operation_name.split("/datasets/")[1].split("/")[0]
         operation_id = response_operation_name.split("/")[-1]
 
@@ -1309,6 +1361,30 @@ async def create_from_bigquery(
         """
         if isinstance(multimodal_dataset, dict):
             multimodal_dataset = types.MultimodalDataset(**multimodal_dataset)
+        if (
+            not hasattr(multimodal_dataset, "metadata")
+            or multimodal_dataset.metadata is None
+        ):
+            raise ValueError("Multimodal dataset metadata is required.")
+        if (
+            not hasattr(multimodal_dataset.metadata, "input_config")
+            or multimodal_dataset.metadata.input_config is None
+        ):
+            raise ValueError("Multimodal dataset input config is required.")
+        if (
+            not hasattr(multimodal_dataset.metadata.input_config, "bigquery_source")
+            or multimodal_dataset.metadata.input_config.bigquery_source is None
+        ):
+            raise ValueError(
+                "Multimodal dataset input config bigquery source is required."
+            )
+        if (
+            not hasattr(multimodal_dataset.metadata.input_config.bigquery_source, "uri")
+            or multimodal_dataset.metadata.input_config.bigquery_source.uri is None
+        ):
+            raise ValueError(
+                "Multimodal dataset input config bigquery source uri is required."
+            )
         if not multimodal_dataset.metadata.input_config.bigquery_source.uri.startswith(
             "bq://"
         ):
@@ -1353,6 +1429,30 @@ async def update_multimodal_dataset(
         """
         if isinstance(multimodal_dataset, dict):
             multimodal_dataset = types.MultimodalDataset(**multimodal_dataset)
+        if (
+            not hasattr(multimodal_dataset, "metadata")
+            or multimodal_dataset.metadata is None
+        ):
+            raise ValueError("Multimodal dataset metadata is required.")
+        if (
+            not hasattr(multimodal_dataset.metadata, "input_config")
+            or multimodal_dataset.metadata.input_config is None
+        ):
+            raise ValueError("Multimodal dataset input config is required.")
+        if (
+            not hasattr(multimodal_dataset.metadata.input_config, "bigquery_source")
+            or multimodal_dataset.metadata.input_config.bigquery_source is None
+        ):
+            raise ValueError(
+                "Multimodal dataset input config bigquery source is required."
+            )
+        if (
+            not hasattr(multimodal_dataset.metadata.input_config.bigquery_source, "uri")
+            or multimodal_dataset.metadata.input_config.bigquery_source.uri is None
+        ):
+            raise ValueError(
+                "Multimodal dataset input config bigquery source uri is required."
+            )
         if not multimodal_dataset.metadata.input_config.bigquery_source.uri.startswith(
             "bq://"
         ):