Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .librarian/state.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-li
libraries:
- id: google-cloud-firestore
version: 2.21.0
last_generated_commit: 659ea6e98acc7d58661ce2aa7b4cf76a7ef3fd42
last_generated_commit: b60f5a5783d5ec0e8a8d254f73ad00316b9b646f
apis:
- path: google/firestore/v1
service_config: firestore_v1.yaml
Expand Down
15 changes: 15 additions & 0 deletions google/cloud/firestore_v1/gapic_metadata.json
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,11 @@
"delete_document"
]
},
"ExecutePipeline": {
"methods": [
"execute_pipeline"
]
},
"GetDocument": {
"methods": [
"get_document"
Expand Down Expand Up @@ -125,6 +130,11 @@
"delete_document"
]
},
"ExecutePipeline": {
"methods": [
"execute_pipeline"
]
},
"GetDocument": {
"methods": [
"get_document"
Expand Down Expand Up @@ -210,6 +220,11 @@
"delete_document"
]
},
"ExecutePipeline": {
"methods": [
"execute_pipeline"
]
},
"GetDocument": {
"methods": [
"get_document"
Expand Down
104 changes: 104 additions & 0 deletions google/cloud/firestore_v1/services/firestore/async_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@
from google.cloud.firestore_v1.types import common
from google.cloud.firestore_v1.types import document
from google.cloud.firestore_v1.types import document as gf_document
from google.cloud.firestore_v1.types import explain_stats
from google.cloud.firestore_v1.types import firestore
from google.cloud.firestore_v1.types import query
from google.cloud.firestore_v1.types import query_profile
Expand Down Expand Up @@ -1248,6 +1249,109 @@ async def sample_run_query():
# Done; return the response.
return response

def execute_pipeline(
    self,
    request: Optional[Union[firestore.ExecutePipelineRequest, dict]] = None,
    *,
    retry: OptionalRetry = gapic_v1.method.DEFAULT,
    timeout: Union[float, object] = gapic_v1.method.DEFAULT,
    metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> Awaitable[AsyncIterable[firestore.ExecutePipelineResponse]]:
    r"""Executes a pipeline query.

    .. code-block:: python

        # This snippet has been automatically generated and should be regarded as a
        # code template only.
        # It will require modifications to work:
        # - It may require correct/in-range values for request initialization.
        # - It may require specifying regional endpoints when creating the service
        #   client as shown in:
        #   https://googleapis.dev/python/google-api-core/latest/client_options.html
        from google.cloud import firestore_v1

        async def sample_execute_pipeline():
            # Create a client
            client = firestore_v1.FirestoreAsyncClient()

            # Initialize request argument(s)
            structured_pipeline = firestore_v1.StructuredPipeline()
            structured_pipeline.pipeline.stages.name = "name_value"

            request = firestore_v1.ExecutePipelineRequest(
                structured_pipeline=structured_pipeline,
                transaction=b'transaction_blob',
                database="database_value",
            )

            # Make the request
            stream = await client.execute_pipeline(request=request)

            # Handle the response
            async for response in stream:
                print(response)

    Args:
        request (Optional[Union[google.cloud.firestore_v1.types.ExecutePipelineRequest, dict]]):
            The request object. The request for
            [Firestore.ExecutePipeline][google.firestore.v1.Firestore.ExecutePipeline].
        retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
            sent along with the request as metadata. Normally, each value must be of type `str`,
            but for metadata keys ending with the suffix `-bin`, the corresponding values must
            be of type `bytes`.

    Returns:
        AsyncIterable[google.cloud.firestore_v1.types.ExecutePipelineResponse]:
            The response for
            [Firestore.ExecutePipeline][google.firestore.v1.Firestore.ExecutePipeline].
    """
    # Create or coerce a protobuf request object.
    # - Use the request object if provided (there's no risk of modifying the input as
    #   there are no flattened fields), or create one.
    if not isinstance(request, firestore.ExecutePipelineRequest):
        request = firestore.ExecutePipelineRequest(request)

    # Wrap the RPC method; this adds retry and timeout information,
    # and friendly error handling.
    rpc = self._client._transport._wrapped_methods[
        self._client._transport.execute_pipeline
    ]

    # Derive explicit routing headers from ``request.database``, whose
    # expected format is ``projects/{project_id}/databases/{database_id}``.
    # NOTE(review): ``re.compile`` results are cached internally by the
    # ``re`` module, but hoisting these patterns to module-level constants
    # would avoid the per-call cache lookup entirely.
    header_params = {}

    project_id_regex = re.compile("^projects/(?P<project_id>[^/]+)(?:/.*)?$")
    project_id_match = project_id_regex.match(request.database)
    if project_id_match and project_id_match.group("project_id"):
        header_params["project_id"] = project_id_match.group("project_id")

    database_id_regex = re.compile(
        "^projects/[^/]+/databases/(?P<database_id>[^/]+)(?:/.*)?$"
    )
    database_id_match = database_id_regex.match(request.database)
    if database_id_match and database_id_match.group("database_id"):
        header_params["database_id"] = database_id_match.group("database_id")

    if header_params:
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(header_params),
        )

    # Validate the universe domain.
    self._client._validate_universe_domain()

    # Send the request.
    response = rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )

    # Done; return the response.
    return response

def run_aggregation_query(
self,
request: Optional[Union[firestore.RunAggregationQueryRequest, dict]] = None,
Expand Down
102 changes: 102 additions & 0 deletions google/cloud/firestore_v1/services/firestore/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,7 @@
from google.cloud.firestore_v1.types import common
from google.cloud.firestore_v1.types import document
from google.cloud.firestore_v1.types import document as gf_document
from google.cloud.firestore_v1.types import explain_stats
from google.cloud.firestore_v1.types import firestore
from google.cloud.firestore_v1.types import query
from google.cloud.firestore_v1.types import query_profile
Expand Down Expand Up @@ -1631,6 +1632,107 @@ def sample_run_query():
# Done; return the response.
return response

def execute_pipeline(
    self,
    request: Optional[Union[firestore.ExecutePipelineRequest, dict]] = None,
    *,
    retry: OptionalRetry = gapic_v1.method.DEFAULT,
    timeout: Union[float, object] = gapic_v1.method.DEFAULT,
    metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> Iterable[firestore.ExecutePipelineResponse]:
    r"""Executes a pipeline query.

    .. code-block:: python

        # This snippet has been automatically generated and should be regarded as a
        # code template only.
        # It will require modifications to work:
        # - It may require correct/in-range values for request initialization.
        # - It may require specifying regional endpoints when creating the service
        #   client as shown in:
        #   https://googleapis.dev/python/google-api-core/latest/client_options.html
        from google.cloud import firestore_v1

        def sample_execute_pipeline():
            # Create a client
            client = firestore_v1.FirestoreClient()

            # Initialize request argument(s)
            structured_pipeline = firestore_v1.StructuredPipeline()
            structured_pipeline.pipeline.stages.name = "name_value"

            request = firestore_v1.ExecutePipelineRequest(
                structured_pipeline=structured_pipeline,
                transaction=b'transaction_blob',
                database="database_value",
            )

            # Make the request
            stream = client.execute_pipeline(request=request)

            # Handle the response
            for response in stream:
                print(response)

    Args:
        request (Union[google.cloud.firestore_v1.types.ExecutePipelineRequest, dict]):
            The request object. The request for
            [Firestore.ExecutePipeline][google.firestore.v1.Firestore.ExecutePipeline].
        retry (google.api_core.retry.Retry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
            sent along with the request as metadata. Normally, each value must be of type `str`,
            but for metadata keys ending with the suffix `-bin`, the corresponding values must
            be of type `bytes`.

    Returns:
        Iterable[google.cloud.firestore_v1.types.ExecutePipelineResponse]:
            The response for
            [Firestore.ExecutePipeline][google.firestore.v1.Firestore.ExecutePipeline].
    """
    # Create or coerce a protobuf request object.
    # - Use the request object if provided (there's no risk of modifying the input as
    #   there are no flattened fields), or create one.
    if not isinstance(request, firestore.ExecutePipelineRequest):
        request = firestore.ExecutePipelineRequest(request)

    # Wrap the RPC method; this adds retry and timeout information,
    # and friendly error handling.
    rpc = self._transport._wrapped_methods[self._transport.execute_pipeline]

    # Derive explicit routing headers from ``request.database``, whose
    # expected format is ``projects/{project_id}/databases/{database_id}``.
    # NOTE(review): ``re.compile`` results are cached internally by the
    # ``re`` module, but hoisting these patterns to module-level constants
    # would avoid the per-call cache lookup entirely.
    header_params = {}

    project_id_regex = re.compile("^projects/(?P<project_id>[^/]+)(?:/.*)?$")
    project_id_match = project_id_regex.match(request.database)
    if project_id_match and project_id_match.group("project_id"):
        header_params["project_id"] = project_id_match.group("project_id")

    database_id_regex = re.compile(
        "^projects/[^/]+/databases/(?P<database_id>[^/]+)(?:/.*)?$"
    )
    database_id_match = database_id_regex.match(request.database)
    if database_id_match and database_id_match.group("database_id"):
        header_params["database_id"] = database_id_match.group("database_id")

    if header_params:
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(header_params),
        )

    # Validate the universe domain.
    self._validate_universe_domain()

    # Send the request.
    response = rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )

    # Done; return the response.
    return response

def run_aggregation_query(
self,
request: Optional[Union[firestore.RunAggregationQueryRequest, dict]] = None,
Expand Down
29 changes: 29 additions & 0 deletions google/cloud/firestore_v1/services/firestore/transports/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -291,6 +291,23 @@ def _prep_wrapped_messages(self, client_info):
default_timeout=300.0,
client_info=client_info,
),
self.execute_pipeline: gapic_v1.method.wrap_method(
self.execute_pipeline,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.DeadlineExceeded,
core_exceptions.InternalServerError,
core_exceptions.ResourceExhausted,
core_exceptions.ServiceUnavailable,
),
deadline=300.0,
),
default_timeout=300.0,
client_info=client_info,
),
self.run_aggregation_query: gapic_v1.method.wrap_method(
self.run_aggregation_query,
default_retry=retries.Retry(
Expand Down Expand Up @@ -514,6 +531,18 @@ def run_query(
]:
raise NotImplementedError()

@property
def execute_pipeline(
    self,
) -> Callable[
    [firestore.ExecutePipelineRequest],
    Union[
        firestore.ExecutePipelineResponse,
        Awaitable[firestore.ExecutePipelineResponse],
    ],
]:
    # Abstract transport hook for the ExecutePipeline RPC. Concrete
    # transports override this property with a callable that performs the
    # actual request; the base class only declares the signature.
    raise NotImplementedError()

@property
def run_aggregation_query(
self,
Expand Down
28 changes: 28 additions & 0 deletions google/cloud/firestore_v1/services/firestore/transports/grpc.py
Original file line number Diff line number Diff line change
Expand Up @@ -573,6 +573,34 @@ def run_query(
)
return self._stubs["run_query"]

@property
def execute_pipeline(
    self,
) -> Callable[
    [firestore.ExecutePipelineRequest], firestore.ExecutePipelineResponse
]:
    r"""Return a callable for the execute pipeline method over gRPC.

    Executes a pipeline query.

    Returns:
        Callable[[~.ExecutePipelineRequest],
            ~.ExecutePipelineResponse]:
        A function that, when called, will call the underlying RPC
        on the server.
    """
    # Lazily create the server-streaming stub on first access and memoize
    # it in self._stubs; gRPC handles (de)serialization via the message
    # classes, so only the serializer hooks need to be supplied here.
    stub = self._stubs.get("execute_pipeline")
    if stub is None:
        stub = self._logged_channel.unary_stream(
            "/google.firestore.v1.Firestore/ExecutePipeline",
            request_serializer=firestore.ExecutePipelineRequest.serialize,
            response_deserializer=firestore.ExecutePipelineResponse.deserialize,
        )
        self._stubs["execute_pipeline"] = stub
    return stub

@property
def run_aggregation_query(
self,
Expand Down
Loading
Loading