diff --git a/.coveragerc b/.coveragerc index 1952876088..d44bd6a37e 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,39 +1,16 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` +# The source of truth for this file is `.librarian/generator-input` -# Generated by synthtool. DO NOT EDIT! [run] branch = True -omit = - google/__init__.py - google/cloud/__init__.py [report] -fail_under = 100 show_missing = True +omit = + google/cloud/firestore_admin/__init__.py + google/cloud/firestore_admin/gapic_version.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER # Ignore debug-only repr def __repr__ - # Ignore abstract methods - raise NotImplementedError -omit = - */gapic/*.py - */proto/*.py - */core/*.py - */site-packages/*.py - google/cloud/__init__.py diff --git a/.librarian/state.yaml b/.librarian/state.yaml index 95cf19f137..cf8655717a 100644 --- a/.librarian/state.yaml +++ b/.librarian/state.yaml @@ -1,4 +1,4 @@ -image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator@sha256:ce48ed695c727f7e13efd1fd68f466a55a0d772c87b69158720cec39965bc8b2 +image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator@sha256:8b2565040b8f040cbdad8eb4f861a7f2156c6a87965c7baa2d942eb16a57ff19 libraries: - id: google-cloud-firestore version: 2.21.0 diff --git a/.pre-commit-config.yaml 
b/.pre-commit-config.yaml deleted file mode 100644 index 1d74695f70..0000000000 --- a/.pre-commit-config.yaml +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# See https://pre-commit.com for more information -# See https://pre-commit.com/hooks.html for more hooks -repos: -- repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.0.1 - hooks: - - id: trailing-whitespace - - id: end-of-file-fixer - - id: check-yaml -- repo: https://github.com/psf/black - rev: 23.7.0 - hooks: - - id: black -- repo: https://github.com/pycqa/flake8 - rev: 6.1.0 - hooks: - - id: flake8 diff --git a/.repo-metadata.json b/.repo-metadata.json deleted file mode 100644 index 670bbc0e42..0000000000 --- a/.repo-metadata.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "name": "firestore", - "name_pretty": "Cloud Firestore", - "product_documentation": "https://cloud.google.com/firestore", - "client_documentation": "https://cloud.google.com/python/docs/reference/firestore/latest", - "issue_tracker": "https://issuetracker.google.com/savedsearches/5337669", - "release_level": "stable", - "language": "python", - "library_type": "GAPIC_COMBO", - "repo": "googleapis/python-firestore", - "distribution_name": "google-cloud-firestore", - "api_id": "firestore.googleapis.com", - "requires_billing": true, - "default_version": "v1", - "codeowner_team": "@googleapis/api-firestore @googleapis/api-firestore-partners", - "api_shortname": "firestore", - 
"api_description": "is a fully-managed NoSQL document database for mobile, web, and server development from Firebase and Google Cloud Platform. It's backed by a multi-region replicated database that ensures once data is committed, it's durable even in the face of unexpected disasters. Not only that, but despite being a distributed database, it's also strongly consistent and offers seamless integration with other Firebase and Google Cloud Platform products, including Google Cloud Functions." -} diff --git a/.trampolinerc b/.trampolinerc deleted file mode 100644 index 0080152373..0000000000 --- a/.trampolinerc +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Add required env vars here. -required_envvars+=( -) - -# Add env vars which are passed down into the container here. -pass_down_envvars+=( - "NOX_SESSION" - ############### - # Docs builds - ############### - "STAGING_BUCKET" - "V2_STAGING_BUCKET" - ################## - # Samples builds - ################## - "INSTALL_LIBRARY_FROM_SOURCE" - "RUN_TESTS_SESSION" - "BUILD_SPECIFIC_GCLOUD_PROJECT" - # Target directories. - "RUN_TESTS_DIRS" - # The nox session to run. - "RUN_TESTS_SESSION" -) - -# Prevent unintentional override on the default image. -if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \ - [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then - echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image." 
- exit 1 -fi - -# Define the default value if it makes sense. -if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then - TRAMPOLINE_IMAGE_UPLOAD="" -fi - -if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then - TRAMPOLINE_IMAGE="" -fi - -if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then - TRAMPOLINE_DOCKERFILE="" -fi - -if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then - TRAMPOLINE_BUILD_FILE="" -fi diff --git a/README.rst b/README.rst deleted file mode 100644 index 71250f4f72..0000000000 --- a/README.rst +++ /dev/null @@ -1,197 +0,0 @@ -Python Client for Cloud Firestore API -===================================== - -|stable| |pypi| |versions| - -`Cloud Firestore API`_: is a fully-managed NoSQL document database for mobile, web, and server development from Firebase and Google Cloud Platform. It's backed by a multi-region replicated database that ensures once data is committed, it's durable even in the face of unexpected disasters. Not only that, but despite being a distributed database, it's also strongly consistent and offers seamless integration with other Firebase and Google Cloud Platform products, including Google Cloud Functions. - -- `Client Library Documentation`_ -- `Product Documentation`_ - -.. |stable| image:: https://img.shields.io/badge/support-stable-gold.svg - :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels -.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-firestore.svg - :target: https://pypi.org/project/google-cloud-firestore/ -.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-firestore.svg - :target: https://pypi.org/project/google-cloud-firestore/ -.. _Cloud Firestore API: https://cloud.google.com/firestore -.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/firestore/latest/summary_overview -.. 
_Product Documentation: https://cloud.google.com/firestore - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. `Enable the Cloud Firestore API.`_ -4. `Set up Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Enable the Cloud Firestore API.: https://cloud.google.com/firestore -.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a virtual environment using `venv`_. `venv`_ is a tool that -creates isolated Python environments. These isolated environments can have separate -versions of Python packages, which allows you to isolate one project's dependencies -from the dependencies of other projects. - -With `venv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`venv`: https://docs.python.org/3/library/venv.html - - -Code samples and snippets -~~~~~~~~~~~~~~~~~~~~~~~~~ - -Code samples and snippets live in the `samples/`_ folder. - -.. _samples/: https://github.com/googleapis/python-firestore/tree/main/samples - - -Supported Python Versions -^^^^^^^^^^^^^^^^^^^^^^^^^ -Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of -Python. - -Python >= 3.7, including 3.14 - -.. _active: https://devguide.python.org/devcycle/#in-development-main-branch -.. 
_maintenance: https://devguide.python.org/devcycle/#maintenance-branches - -Unsupported Python Versions -^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Python <= 3.6 - -If you are using an `end-of-life`_ -version of Python, we recommend that you update as soon as possible to an actively supported version. - -.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - pip install google-cloud-firestore - - -Windows -^^^^^^^ - -.. code-block:: console - - py -m venv - .\\Scripts\activate - pip install google-cloud-firestore - -Next Steps -~~~~~~~~~~ - -- Read the `Client Library Documentation`_ for Cloud Firestore API - to see other available methods on the client. -- Read the `Cloud Firestore API Product documentation`_ to learn - more about the product and see How-to Guides. -- View this `README`_ to see the full list of Cloud - APIs that we cover. - -.. _Cloud Firestore API Product documentation: https://cloud.google.com/firestore -.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst - -Logging -------- - -This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. -Note the following: - -#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. -#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. -#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. 
- -Simple, environment-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google -logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged -messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging -event. - -A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. - -- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc. -- Invalid logging scopes: :code:`foo`, :code:`123`, etc. - -**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. - -Environment-Based Examples -^^^^^^^^^^^^^^^^^^^^^^^^^^ - -- Enabling the default handler for all Google-based loggers - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google - -- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 - - -Advanced, code-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -You can also configure a valid logging scope using Python's standard `logging` mechanism. - -Code-Based Examples -^^^^^^^^^^^^^^^^^^^ - -- Configuring a handler for all Google-based loggers - -.. code-block:: python - - import logging - - from google.cloud import library_v1 - - base_logger = logging.getLogger("google") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - -- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): - -.. 
code-block:: python - - import logging - - from google.cloud import library_v1 - - base_logger = logging.getLogger("google.cloud.library_v1") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - -Logging details -~~~~~~~~~~~~~~~ - -#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root - logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set - :code:`logging.getLogger("google").propagate = True` in your code. -#. You can mix the different logging configurations above for different Google modules. For example, you may want use a code-based logging configuration for - one library, but decide you need to also set up environment-based logging configuration for another library. - - #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual - if the code -based configuration gets applied first. - -#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get - executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. - (This is the reason for 2.i. above.) diff --git a/SECURITY.md b/SECURITY.md deleted file mode 100644 index 8b58ae9c01..0000000000 --- a/SECURITY.md +++ /dev/null @@ -1,7 +0,0 @@ -# Security Policy - -To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). - -The Google Security Team will respond within 5 working days of your report on g.co/vulnz. - -We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue. 
diff --git a/docs/README.rst b/docs/README.rst deleted file mode 100644 index 71250f4f72..0000000000 --- a/docs/README.rst +++ /dev/null @@ -1,197 +0,0 @@ -Python Client for Cloud Firestore API -===================================== - -|stable| |pypi| |versions| - -`Cloud Firestore API`_: is a fully-managed NoSQL document database for mobile, web, and server development from Firebase and Google Cloud Platform. It's backed by a multi-region replicated database that ensures once data is committed, it's durable even in the face of unexpected disasters. Not only that, but despite being a distributed database, it's also strongly consistent and offers seamless integration with other Firebase and Google Cloud Platform products, including Google Cloud Functions. - -- `Client Library Documentation`_ -- `Product Documentation`_ - -.. |stable| image:: https://img.shields.io/badge/support-stable-gold.svg - :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels -.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-firestore.svg - :target: https://pypi.org/project/google-cloud-firestore/ -.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-firestore.svg - :target: https://pypi.org/project/google-cloud-firestore/ -.. _Cloud Firestore API: https://cloud.google.com/firestore -.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/firestore/latest/summary_overview -.. _Product Documentation: https://cloud.google.com/firestore - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. `Enable the Cloud Firestore API.`_ -4. `Set up Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. 
_Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Enable the Cloud Firestore API.: https://cloud.google.com/firestore -.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a virtual environment using `venv`_. `venv`_ is a tool that -creates isolated Python environments. These isolated environments can have separate -versions of Python packages, which allows you to isolate one project's dependencies -from the dependencies of other projects. - -With `venv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`venv`: https://docs.python.org/3/library/venv.html - - -Code samples and snippets -~~~~~~~~~~~~~~~~~~~~~~~~~ - -Code samples and snippets live in the `samples/`_ folder. - -.. _samples/: https://github.com/googleapis/python-firestore/tree/main/samples - - -Supported Python Versions -^^^^^^^^^^^^^^^^^^^^^^^^^ -Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of -Python. - -Python >= 3.7, including 3.14 - -.. _active: https://devguide.python.org/devcycle/#in-development-main-branch -.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches - -Unsupported Python Versions -^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Python <= 3.6 - -If you are using an `end-of-life`_ -version of Python, we recommend that you update as soon as possible to an actively supported version. - -.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - pip install google-cloud-firestore - - -Windows -^^^^^^^ - -.. 
code-block:: console - - py -m venv - .\\Scripts\activate - pip install google-cloud-firestore - -Next Steps -~~~~~~~~~~ - -- Read the `Client Library Documentation`_ for Cloud Firestore API - to see other available methods on the client. -- Read the `Cloud Firestore API Product documentation`_ to learn - more about the product and see How-to Guides. -- View this `README`_ to see the full list of Cloud - APIs that we cover. - -.. _Cloud Firestore API Product documentation: https://cloud.google.com/firestore -.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst - -Logging -------- - -This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. -Note the following: - -#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. -#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. -#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. - -Simple, environment-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google -logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged -messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging -event. 
- -A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. - -- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc. -- Invalid logging scopes: :code:`foo`, :code:`123`, etc. - -**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. - -Environment-Based Examples -^^^^^^^^^^^^^^^^^^^^^^^^^^ - -- Enabling the default handler for all Google-based loggers - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google - -- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 - - -Advanced, code-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -You can also configure a valid logging scope using Python's standard `logging` mechanism. - -Code-Based Examples -^^^^^^^^^^^^^^^^^^^ - -- Configuring a handler for all Google-based loggers - -.. code-block:: python - - import logging - - from google.cloud import library_v1 - - base_logger = logging.getLogger("google") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - -- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: python - - import logging - - from google.cloud import library_v1 - - base_logger = logging.getLogger("google.cloud.library_v1") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - -Logging details -~~~~~~~~~~~~~~~ - -#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root - logger from the `google`-level logger. 
If you need the events to be propagated to the root logger, you must explicitly set - :code:`logging.getLogger("google").propagate = True` in your code. -#. You can mix the different logging configurations above for different Google modules. For example, you may want use a code-based logging configuration for - one library, but decide you need to also set up environment-based logging configuration for another library. - - #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual - if the code -based configuration gets applied first. - -#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get - executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. - (This is the reason for 2.i. above.) diff --git a/docs/summary_overview.md b/docs/summary_overview.md deleted file mode 100644 index f983b56ecc..0000000000 --- a/docs/summary_overview.md +++ /dev/null @@ -1,22 +0,0 @@ -[ -This is a templated file. Adding content to this file may result in it being -reverted. Instead, if you want to place additional content, create an -"overview_content.md" file in `docs/` directory. The Sphinx tool will -pick up on the content and merge the content. -]: # - -# Cloud Firestore API - -Overview of the APIs available for Cloud Firestore API. - -## All entries - -Classes, methods and properties & attributes for -Cloud Firestore API. 
- -[classes](https://cloud.google.com/python/docs/reference/firestore/latest/summary_class.html) - -[methods](https://cloud.google.com/python/docs/reference/firestore/latest/summary_method.html) - -[properties and -attributes](https://cloud.google.com/python/docs/reference/firestore/latest/summary_property.html) diff --git a/google/cloud/firestore_admin_v1/gapic_metadata.json b/google/cloud/firestore_admin_v1/gapic_metadata.json deleted file mode 100644 index b8d4cb298c..0000000000 --- a/google/cloud/firestore_admin_v1/gapic_metadata.json +++ /dev/null @@ -1,508 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.firestore_admin_v1", - "protoPackage": "google.firestore.admin.v1", - "schema": "1.0", - "services": { - "FirestoreAdmin": { - "clients": { - "grpc": { - "libraryClient": "FirestoreAdminClient", - "rpcs": { - "BulkDeleteDocuments": { - "methods": [ - "bulk_delete_documents" - ] - }, - "CloneDatabase": { - "methods": [ - "clone_database" - ] - }, - "CreateBackupSchedule": { - "methods": [ - "create_backup_schedule" - ] - }, - "CreateDatabase": { - "methods": [ - "create_database" - ] - }, - "CreateIndex": { - "methods": [ - "create_index" - ] - }, - "CreateUserCreds": { - "methods": [ - "create_user_creds" - ] - }, - "DeleteBackup": { - "methods": [ - "delete_backup" - ] - }, - "DeleteBackupSchedule": { - "methods": [ - "delete_backup_schedule" - ] - }, - "DeleteDatabase": { - "methods": [ - "delete_database" - ] - }, - "DeleteIndex": { - "methods": [ - "delete_index" - ] - }, - "DeleteUserCreds": { - "methods": [ - "delete_user_creds" - ] - }, - "DisableUserCreds": { - "methods": [ - "disable_user_creds" - ] - }, - "EnableUserCreds": { - "methods": [ - "enable_user_creds" - ] - }, - "ExportDocuments": { - "methods": [ - "export_documents" - ] - }, - "GetBackup": { - "methods": [ - "get_backup" - ] - }, - "GetBackupSchedule": { - "methods": [ - 
"get_backup_schedule" - ] - }, - "GetDatabase": { - "methods": [ - "get_database" - ] - }, - "GetField": { - "methods": [ - "get_field" - ] - }, - "GetIndex": { - "methods": [ - "get_index" - ] - }, - "GetUserCreds": { - "methods": [ - "get_user_creds" - ] - }, - "ImportDocuments": { - "methods": [ - "import_documents" - ] - }, - "ListBackupSchedules": { - "methods": [ - "list_backup_schedules" - ] - }, - "ListBackups": { - "methods": [ - "list_backups" - ] - }, - "ListDatabases": { - "methods": [ - "list_databases" - ] - }, - "ListFields": { - "methods": [ - "list_fields" - ] - }, - "ListIndexes": { - "methods": [ - "list_indexes" - ] - }, - "ListUserCreds": { - "methods": [ - "list_user_creds" - ] - }, - "ResetUserPassword": { - "methods": [ - "reset_user_password" - ] - }, - "RestoreDatabase": { - "methods": [ - "restore_database" - ] - }, - "UpdateBackupSchedule": { - "methods": [ - "update_backup_schedule" - ] - }, - "UpdateDatabase": { - "methods": [ - "update_database" - ] - }, - "UpdateField": { - "methods": [ - "update_field" - ] - } - } - }, - "grpc-async": { - "libraryClient": "FirestoreAdminAsyncClient", - "rpcs": { - "BulkDeleteDocuments": { - "methods": [ - "bulk_delete_documents" - ] - }, - "CloneDatabase": { - "methods": [ - "clone_database" - ] - }, - "CreateBackupSchedule": { - "methods": [ - "create_backup_schedule" - ] - }, - "CreateDatabase": { - "methods": [ - "create_database" - ] - }, - "CreateIndex": { - "methods": [ - "create_index" - ] - }, - "CreateUserCreds": { - "methods": [ - "create_user_creds" - ] - }, - "DeleteBackup": { - "methods": [ - "delete_backup" - ] - }, - "DeleteBackupSchedule": { - "methods": [ - "delete_backup_schedule" - ] - }, - "DeleteDatabase": { - "methods": [ - "delete_database" - ] - }, - "DeleteIndex": { - "methods": [ - "delete_index" - ] - }, - "DeleteUserCreds": { - "methods": [ - "delete_user_creds" - ] - }, - "DisableUserCreds": { - "methods": [ - "disable_user_creds" - ] - }, - "EnableUserCreds": { - 
"methods": [ - "enable_user_creds" - ] - }, - "ExportDocuments": { - "methods": [ - "export_documents" - ] - }, - "GetBackup": { - "methods": [ - "get_backup" - ] - }, - "GetBackupSchedule": { - "methods": [ - "get_backup_schedule" - ] - }, - "GetDatabase": { - "methods": [ - "get_database" - ] - }, - "GetField": { - "methods": [ - "get_field" - ] - }, - "GetIndex": { - "methods": [ - "get_index" - ] - }, - "GetUserCreds": { - "methods": [ - "get_user_creds" - ] - }, - "ImportDocuments": { - "methods": [ - "import_documents" - ] - }, - "ListBackupSchedules": { - "methods": [ - "list_backup_schedules" - ] - }, - "ListBackups": { - "methods": [ - "list_backups" - ] - }, - "ListDatabases": { - "methods": [ - "list_databases" - ] - }, - "ListFields": { - "methods": [ - "list_fields" - ] - }, - "ListIndexes": { - "methods": [ - "list_indexes" - ] - }, - "ListUserCreds": { - "methods": [ - "list_user_creds" - ] - }, - "ResetUserPassword": { - "methods": [ - "reset_user_password" - ] - }, - "RestoreDatabase": { - "methods": [ - "restore_database" - ] - }, - "UpdateBackupSchedule": { - "methods": [ - "update_backup_schedule" - ] - }, - "UpdateDatabase": { - "methods": [ - "update_database" - ] - }, - "UpdateField": { - "methods": [ - "update_field" - ] - } - } - }, - "rest": { - "libraryClient": "FirestoreAdminClient", - "rpcs": { - "BulkDeleteDocuments": { - "methods": [ - "bulk_delete_documents" - ] - }, - "CloneDatabase": { - "methods": [ - "clone_database" - ] - }, - "CreateBackupSchedule": { - "methods": [ - "create_backup_schedule" - ] - }, - "CreateDatabase": { - "methods": [ - "create_database" - ] - }, - "CreateIndex": { - "methods": [ - "create_index" - ] - }, - "CreateUserCreds": { - "methods": [ - "create_user_creds" - ] - }, - "DeleteBackup": { - "methods": [ - "delete_backup" - ] - }, - "DeleteBackupSchedule": { - "methods": [ - "delete_backup_schedule" - ] - }, - "DeleteDatabase": { - "methods": [ - "delete_database" - ] - }, - "DeleteIndex": { - "methods": 
[ - "delete_index" - ] - }, - "DeleteUserCreds": { - "methods": [ - "delete_user_creds" - ] - }, - "DisableUserCreds": { - "methods": [ - "disable_user_creds" - ] - }, - "EnableUserCreds": { - "methods": [ - "enable_user_creds" - ] - }, - "ExportDocuments": { - "methods": [ - "export_documents" - ] - }, - "GetBackup": { - "methods": [ - "get_backup" - ] - }, - "GetBackupSchedule": { - "methods": [ - "get_backup_schedule" - ] - }, - "GetDatabase": { - "methods": [ - "get_database" - ] - }, - "GetField": { - "methods": [ - "get_field" - ] - }, - "GetIndex": { - "methods": [ - "get_index" - ] - }, - "GetUserCreds": { - "methods": [ - "get_user_creds" - ] - }, - "ImportDocuments": { - "methods": [ - "import_documents" - ] - }, - "ListBackupSchedules": { - "methods": [ - "list_backup_schedules" - ] - }, - "ListBackups": { - "methods": [ - "list_backups" - ] - }, - "ListDatabases": { - "methods": [ - "list_databases" - ] - }, - "ListFields": { - "methods": [ - "list_fields" - ] - }, - "ListIndexes": { - "methods": [ - "list_indexes" - ] - }, - "ListUserCreds": { - "methods": [ - "list_user_creds" - ] - }, - "ResetUserPassword": { - "methods": [ - "reset_user_password" - ] - }, - "RestoreDatabase": { - "methods": [ - "restore_database" - ] - }, - "UpdateBackupSchedule": { - "methods": [ - "update_backup_schedule" - ] - }, - "UpdateDatabase": { - "methods": [ - "update_database" - ] - }, - "UpdateField": { - "methods": [ - "update_field" - ] - } - } - } - } - } - } -} diff --git a/google/cloud/firestore_admin_v1/gapic_version.py b/google/cloud/firestore_admin_v1/gapic_version.py deleted file mode 100644 index b5f2eaf6ce..0000000000 --- a/google/cloud/firestore_admin_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "2.21.0" # {x-release-please-version} diff --git a/google/cloud/firestore_admin_v1/py.typed b/google/cloud/firestore_admin_v1/py.typed deleted file mode 100644 index f7a4796eee..0000000000 --- a/google/cloud/firestore_admin_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-firestore-admin package uses inline types. diff --git a/google/cloud/firestore_admin_v1/services/__init__.py b/google/cloud/firestore_admin_v1/services/__init__.py deleted file mode 100644 index cbf94b283c..0000000000 --- a/google/cloud/firestore_admin_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/__init__.py b/google/cloud/firestore_admin_v1/services/firestore_admin/__init__.py deleted file mode 100644 index 41b9d63a9f..0000000000 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import FirestoreAdminClient -from .async_client import FirestoreAdminAsyncClient - -__all__ = ( - "FirestoreAdminClient", - "FirestoreAdminAsyncClient", -) diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py b/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py deleted file mode 100644 index a2800e34ea..0000000000 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py +++ /dev/null @@ -1,4487 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging as std_logging -from collections import OrderedDict -import re -from typing import ( - Dict, - Callable, - Mapping, - MutableMapping, - MutableSequence, - Optional, - Sequence, - Tuple, - Type, - Union, -) - -from google.cloud.firestore_admin_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api_core import operation as gac_operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.firestore_admin_v1.services.firestore_admin import pagers -from google.cloud.firestore_admin_v1.types import backup -from google.cloud.firestore_admin_v1.types import database -from google.cloud.firestore_admin_v1.types import database as gfa_database -from google.cloud.firestore_admin_v1.types import field -from google.cloud.firestore_admin_v1.types import field as gfa_field -from google.cloud.firestore_admin_v1.types import firestore_admin -from google.cloud.firestore_admin_v1.types import index -from google.cloud.firestore_admin_v1.types import index as gfa_index -from google.cloud.firestore_admin_v1.types import operation as gfa_operation -from google.cloud.firestore_admin_v1.types import schedule -from google.cloud.firestore_admin_v1.types import user_creds -from google.cloud.firestore_admin_v1.types import user_creds as gfa_user_creds -from 
google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import FirestoreAdminGrpcAsyncIOTransport -from .client import FirestoreAdminClient - -try: - from google.api_core import client_logging # type: ignore - - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class FirestoreAdminAsyncClient: - """The Cloud Firestore Admin API. - - This API provides several administrative services for Cloud - Firestore. - - Project, Database, Namespace, Collection, Collection Group, and - Document are used as defined in the Google Cloud Firestore API. - - Operation: An Operation represents work being performed in the - background. - - The index service manages Cloud Firestore indexes. - - Index creation is performed asynchronously. An Operation resource is - created for each such asynchronous operation. The state of the - operation (including any errors encountered) may be queried via the - Operation resource. - - The Operations collection provides a record of actions performed for - the specified Project (including any Operations in progress). - Operations are not created directly but through calls on other - collections or resources. - - An Operation that is done may be deleted so that it is no longer - listed as part of the Operation collection. Operations are garbage - collected after 30 days. By default, ListOperations will only return - in progress and failed operations. 
To list completed operation, - issue a ListOperations request with the filter ``done: true``. - - Operations are created by service ``FirestoreAdmin``, but are - accessed via service ``google.longrunning.Operations``. - """ - - _client: FirestoreAdminClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = FirestoreAdminClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = FirestoreAdminClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = FirestoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = FirestoreAdminClient._DEFAULT_UNIVERSE - - backup_path = staticmethod(FirestoreAdminClient.backup_path) - parse_backup_path = staticmethod(FirestoreAdminClient.parse_backup_path) - backup_schedule_path = staticmethod(FirestoreAdminClient.backup_schedule_path) - parse_backup_schedule_path = staticmethod( - FirestoreAdminClient.parse_backup_schedule_path - ) - collection_group_path = staticmethod(FirestoreAdminClient.collection_group_path) - parse_collection_group_path = staticmethod( - FirestoreAdminClient.parse_collection_group_path - ) - database_path = staticmethod(FirestoreAdminClient.database_path) - parse_database_path = staticmethod(FirestoreAdminClient.parse_database_path) - field_path = staticmethod(FirestoreAdminClient.field_path) - parse_field_path = staticmethod(FirestoreAdminClient.parse_field_path) - index_path = staticmethod(FirestoreAdminClient.index_path) - parse_index_path = staticmethod(FirestoreAdminClient.parse_index_path) - location_path = staticmethod(FirestoreAdminClient.location_path) - parse_location_path = staticmethod(FirestoreAdminClient.parse_location_path) - operation_path = staticmethod(FirestoreAdminClient.operation_path) - parse_operation_path = staticmethod(FirestoreAdminClient.parse_operation_path) - user_creds_path = staticmethod(FirestoreAdminClient.user_creds_path) - parse_user_creds_path = 
staticmethod(FirestoreAdminClient.parse_user_creds_path) - common_billing_account_path = staticmethod( - FirestoreAdminClient.common_billing_account_path - ) - parse_common_billing_account_path = staticmethod( - FirestoreAdminClient.parse_common_billing_account_path - ) - common_folder_path = staticmethod(FirestoreAdminClient.common_folder_path) - parse_common_folder_path = staticmethod( - FirestoreAdminClient.parse_common_folder_path - ) - common_organization_path = staticmethod( - FirestoreAdminClient.common_organization_path - ) - parse_common_organization_path = staticmethod( - FirestoreAdminClient.parse_common_organization_path - ) - common_project_path = staticmethod(FirestoreAdminClient.common_project_path) - parse_common_project_path = staticmethod( - FirestoreAdminClient.parse_common_project_path - ) - common_location_path = staticmethod(FirestoreAdminClient.common_location_path) - parse_common_location_path = staticmethod( - FirestoreAdminClient.parse_common_location_path - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - FirestoreAdminAsyncClient: The constructed client. - """ - return FirestoreAdminClient.from_service_account_info.__func__(FirestoreAdminAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - FirestoreAdminAsyncClient: The constructed client. 
- """ - return FirestoreAdminClient.from_service_account_file.__func__(FirestoreAdminAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source( - cls, client_options: Optional[ClientOptions] = None - ): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return FirestoreAdminClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> FirestoreAdminTransport: - """Returns the transport used by the client instance. - - Returns: - FirestoreAdminTransport: The transport used by the client instance. 
- """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = FirestoreAdminClient.get_transport_class - - def __init__( - self, - *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[ - Union[str, FirestoreAdminTransport, Callable[..., FirestoreAdminTransport]] - ] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the firestore admin async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,FirestoreAdminTransport,Callable[..., FirestoreAdminTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the FirestoreAdminTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. 
Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. 
- """ - self._client = FirestoreAdminClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - std_logging.DEBUG - ): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.firestore.admin_v1.FirestoreAdminAsyncClient`.", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "credentialsType": None, - }, - ) - - async def create_index( - self, - request: Optional[Union[firestore_admin.CreateIndexRequest, dict]] = None, - *, - parent: Optional[str] = None, - index: Optional[gfa_index.Index] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a composite index. This returns a - [google.longrunning.Operation][google.longrunning.Operation] - which may be used to track the status of the creation. The - metadata for the operation will be the type - [IndexOperationMetadata][google.firestore.admin.v1.IndexOperationMetadata]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_create_index(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.CreateIndexRequest( - parent="parent_value", - ) - - # Make the request - operation = client.create_index(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.CreateIndexRequest, dict]]): - The request object. The request for - [FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex]. - parent (:class:`str`): - Required. A parent name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - index (:class:`google.cloud.firestore_admin_v1.types.Index`): - Required. The composite index to - create. - - This corresponds to the ``index`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. 
- - The result type for the operation will be :class:`google.cloud.firestore_admin_v1.types.Index` Cloud Firestore indexes enable simple and complex queries against - documents in a database. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, index] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.CreateIndexRequest): - request = firestore_admin.CreateIndexRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if index is not None: - request.index = index - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.create_index - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - gfa_index.Index, - metadata_type=gfa_operation.IndexOperationMetadata, - ) - - # Done; return the response. 
- return response - - async def list_indexes( - self, - request: Optional[Union[firestore_admin.ListIndexesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListIndexesAsyncPager: - r"""Lists composite indexes. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_list_indexes(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListIndexesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_indexes(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.ListIndexesRequest, dict]]): - The request object. The request for - [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes]. - parent (:class:`str`): - Required. A parent name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.services.firestore_admin.pagers.ListIndexesAsyncPager: - The response for - [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.ListIndexesRequest): - request = firestore_admin.ListIndexesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.list_indexes - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListIndexesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_index( - self, - request: Optional[Union[firestore_admin.GetIndexRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> index.Index: - r"""Gets a composite index. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_get_index(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetIndexRequest( - name="name_value", - ) - - # Make the request - response = await client.get_index(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.GetIndexRequest, dict]]): - The request object. The request for - [FirestoreAdmin.GetIndex][google.firestore.admin.v1.FirestoreAdmin.GetIndex]. - name (:class:`str`): - Required. 
A name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.Index: - Cloud Firestore indexes enable simple - and complex queries against documents in - a database. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.GetIndexRequest): - request = firestore_admin.GetIndexRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_index - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_index( - self, - request: Optional[Union[firestore_admin.DeleteIndexRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a composite index. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_delete_index(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.DeleteIndexRequest( - name="name_value", - ) - - # Make the request - await client.delete_index(request=request) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.DeleteIndexRequest, dict]]): - The request object. The request for - [FirestoreAdmin.DeleteIndex][google.firestore.admin.v1.FirestoreAdmin.DeleteIndex]. - name (:class:`str`): - Required. 
A name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.DeleteIndexRequest): - request = firestore_admin.DeleteIndexRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.delete_index - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. 
- self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def get_field( - self, - request: Optional[Union[firestore_admin.GetFieldRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> field.Field: - r"""Gets the metadata and configuration for a Field. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_get_field(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetFieldRequest( - name="name_value", - ) - - # Make the request - response = await client.get_field(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.GetFieldRequest, dict]]): - The request object. The request for - [FirestoreAdmin.GetField][google.firestore.admin.v1.FirestoreAdmin.GetField]. - name (:class:`str`): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.Field: - Represents a single field in the - database. - Fields are grouped by their "Collection - Group", which represent all collections - in the database with the same ID. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.GetFieldRequest): - request = firestore_admin.GetFieldRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_field - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_field( - self, - request: Optional[Union[firestore_admin.UpdateFieldRequest, dict]] = None, - *, - field: Optional[gfa_field.Field] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates a field configuration. Currently, field updates apply - only to single field index configuration. However, calls to - [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField] - should provide a field mask to avoid changing any configuration - that the caller isn't aware of. The field mask should be - specified as: ``{ paths: "index_config" }``. - - This call returns a - [google.longrunning.Operation][google.longrunning.Operation] - which may be used to track the status of the field update. The - metadata for the operation will be the type - [FieldOperationMetadata][google.firestore.admin.v1.FieldOperationMetadata]. - - To configure the default field settings for the database, use - the special ``Field`` with resource name: - ``projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_update_field(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - field = firestore_admin_v1.Field() - field.name = "name_value" - - request = firestore_admin_v1.UpdateFieldRequest( - field=field, - ) - - # Make the request - operation = client.update_field(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.UpdateFieldRequest, dict]]): - The request object. The request for - [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField]. - field (:class:`google.cloud.firestore_admin_v1.types.Field`): - Required. The field to be updated. - This corresponds to the ``field`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.firestore_admin_v1.types.Field` - Represents a single field in the database. - - Fields are grouped by their "Collection Group", which - represent all collections in the database with the - same ID. 
- - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [field] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.UpdateFieldRequest): - request = firestore_admin.UpdateFieldRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if field is not None: - request.field = field - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.update_field - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("field.name", request.field.name),) - ), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - gfa_field.Field, - metadata_type=gfa_operation.FieldOperationMetadata, - ) - - # Done; return the response. 
- return response - - async def list_fields( - self, - request: Optional[Union[firestore_admin.ListFieldsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListFieldsAsyncPager: - r"""Lists the field configuration and metadata for this database. - - Currently, - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] - only supports listing fields that have been explicitly - overridden. To issue this query, call - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] - with the filter set to ``indexConfig.usesAncestorConfig:false`` - or ``ttlConfig:*``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_list_fields(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListFieldsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_fields(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.ListFieldsRequest, dict]]): - The request object. The request for - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]. - parent (:class:`str`): - Required. 
A parent name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.services.firestore_admin.pagers.ListFieldsAsyncPager: - The response for - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.ListFieldsRequest): - request = firestore_admin.ListFieldsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[ - self._client._transport.list_fields - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListFieldsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def export_documents( - self, - request: Optional[Union[firestore_admin.ExportDocumentsRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Exports a copy of all or a subset of documents from - Google Cloud Firestore to another storage system, such - as Google Cloud Storage. Recent updates to documents may - not be reflected in the export. The export occurs in the - background and its progress can be monitored and managed - via the Operation resource that is created. The output - of an export may only be used once the associated - operation is done. If an export operation is cancelled - before completion it may leave partial data behind in - Google Cloud Storage. - - For more details on export behavior and output format, - refer to: - - https://cloud.google.com/firestore/docs/manage-data/export-import - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_export_documents(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ExportDocumentsRequest( - name="name_value", - ) - - # Make the request - operation = client.export_documents(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.ExportDocumentsRequest, dict]]): - The request object. The request for - [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments]. - name (:class:`str`): - Required. Database to export. Should be of the form: - ``projects/{project_id}/databases/{database_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. 
- - The result type for the operation will be :class:`google.cloud.firestore_admin_v1.types.ExportDocumentsResponse` Returned in the [google.longrunning.Operation][google.longrunning.Operation] - response field. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.ExportDocumentsRequest): - request = firestore_admin.ExportDocumentsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.export_documents - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - gfa_operation.ExportDocumentsResponse, - metadata_type=gfa_operation.ExportDocumentsMetadata, - ) - - # Done; return the response. 
- return response - - async def import_documents( - self, - request: Optional[Union[firestore_admin.ImportDocumentsRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Imports documents into Google Cloud Firestore. - Existing documents with the same name are overwritten. - The import occurs in the background and its progress can - be monitored and managed via the Operation resource that - is created. If an ImportDocuments operation is - cancelled, it is possible that a subset of the data has - already been imported to Cloud Firestore. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_import_documents(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ImportDocumentsRequest( - name="name_value", - ) - - # Make the request - operation = client.import_documents(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.ImportDocumentsRequest, dict]]): - The request object. The request for - [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments]. - name (:class:`str`): - Required. Database to import into. 
Should be of the - form: ``projects/{project_id}/databases/{database_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.ImportDocumentsRequest): - request = firestore_admin.ImportDocumentsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.import_documents - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=gfa_operation.ImportDocumentsMetadata, - ) - - # Done; return the response. - return response - - async def bulk_delete_documents( - self, - request: Optional[ - Union[firestore_admin.BulkDeleteDocumentsRequest, dict] - ] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Bulk deletes a subset of documents from Google Cloud - Firestore. Documents created or updated after the - underlying system starts to process the request will not - be deleted. The bulk delete occurs in the background and - its progress can be monitored and managed via the - Operation resource that is created. - - For more details on bulk delete behavior, refer to: - - https://cloud.google.com/firestore/docs/manage-data/bulk-delete - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_bulk_delete_documents(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.BulkDeleteDocumentsRequest( - name="name_value", - ) - - # Make the request - operation = client.bulk_delete_documents(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.BulkDeleteDocumentsRequest, dict]]): - The request object. The request for - [FirestoreAdmin.BulkDeleteDocuments][google.firestore.admin.v1.FirestoreAdmin.BulkDeleteDocuments]. - - When both collection_ids and namespace_ids are set, only - documents satisfying both conditions will be deleted. - - Requests with namespace_ids and collection_ids both - empty will be rejected. Please use - [FirestoreAdmin.DeleteDatabase][google.firestore.admin.v1.FirestoreAdmin.DeleteDatabase] - instead. - name (:class:`str`): - Required. Database to operate. Should be of the form: - ``projects/{project_id}/databases/{database_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.firestore_admin_v1.types.BulkDeleteDocumentsResponse` The response for - [FirestoreAdmin.BulkDeleteDocuments][google.firestore.admin.v1.FirestoreAdmin.BulkDeleteDocuments]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.BulkDeleteDocumentsRequest): - request = firestore_admin.BulkDeleteDocumentsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.bulk_delete_documents - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
- response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - firestore_admin.BulkDeleteDocumentsResponse, - metadata_type=gfa_operation.BulkDeleteDocumentsMetadata, - ) - - # Done; return the response. - return response - - async def create_database( - self, - request: Optional[Union[firestore_admin.CreateDatabaseRequest, dict]] = None, - *, - parent: Optional[str] = None, - database: Optional[gfa_database.Database] = None, - database_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Create a database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_create_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.CreateDatabaseRequest( - parent="parent_value", - database_id="database_id_value", - ) - - # Make the request - operation = client.create_database(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.CreateDatabaseRequest, dict]]): - The request object. The request for - [FirestoreAdmin.CreateDatabase][google.firestore.admin.v1.FirestoreAdmin.CreateDatabase]. - parent (:class:`str`): - Required. 
A parent name of the form - ``projects/{project_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - database (:class:`google.cloud.firestore_admin_v1.types.Database`): - Required. The Database to create. - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - database_id (:class:`str`): - Required. The ID to use for the database, which will - become the final component of the database's resource - name. - - This value should be 4-63 characters. Valid characters - are /[a-z][0-9]-/ with first character a letter and the - last a letter or a number. Must not be UUID-like - /[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}/. - - "(default)" database ID is also valid. - - This corresponds to the ``database_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.firestore_admin_v1.types.Database` - A Cloud Firestore Database. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [parent, database, database_id] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.CreateDatabaseRequest): - request = firestore_admin.CreateDatabaseRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if database is not None: - request.database = database - if database_id is not None: - request.database_id = database_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.create_database - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - gfa_database.Database, - metadata_type=firestore_admin.CreateDatabaseMetadata, - ) - - # Done; return the response. 
- return response - - async def get_database( - self, - request: Optional[Union[firestore_admin.GetDatabaseRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> database.Database: - r"""Gets information about a database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_get_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetDatabaseRequest( - name="name_value", - ) - - # Make the request - response = await client.get_database(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.GetDatabaseRequest, dict]]): - The request object. The request for - [FirestoreAdmin.GetDatabase][google.firestore.admin.v1.FirestoreAdmin.GetDatabase]. - name (:class:`str`): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.Database: - A Cloud Firestore Database. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.GetDatabaseRequest): - request = firestore_admin.GetDatabaseRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_database - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def list_databases( - self, - request: Optional[Union[firestore_admin.ListDatabasesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> firestore_admin.ListDatabasesResponse: - r"""List all the databases in the project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_list_databases(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListDatabasesRequest( - parent="parent_value", - ) - - # Make the request - response = await client.list_databases(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.ListDatabasesRequest, dict]]): - The request object. A request to list the Firestore - Databases in all locations for a - project. - parent (:class:`str`): - Required. A parent name of the form - ``projects/{project_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.ListDatabasesResponse: - The list of databases for a project. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.ListDatabasesRequest): - request = firestore_admin.ListDatabasesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.list_databases - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def update_database( - self, - request: Optional[Union[firestore_admin.UpdateDatabaseRequest, dict]] = None, - *, - database: Optional[gfa_database.Database] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates a database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_update_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.UpdateDatabaseRequest( - ) - - # Make the request - operation = client.update_database(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.UpdateDatabaseRequest, dict]]): - The request object. The request for - [FirestoreAdmin.UpdateDatabase][google.firestore.admin.v1.FirestoreAdmin.UpdateDatabase]. - database (:class:`google.cloud.firestore_admin_v1.types.Database`): - Required. The database to update. - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The list of fields to be updated. 
- This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.firestore_admin_v1.types.Database` - A Cloud Firestore Database. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database, update_mask] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.UpdateDatabaseRequest): - request = firestore_admin.UpdateDatabaseRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[ - self._client._transport.update_database - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("database.name", request.database.name),) - ), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - gfa_database.Database, - metadata_type=firestore_admin.UpdateDatabaseMetadata, - ) - - # Done; return the response. - return response - - async def delete_database( - self, - request: Optional[Union[firestore_admin.DeleteDatabaseRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_delete_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.DeleteDatabaseRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_database(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.DeleteDatabaseRequest, dict]]): - The request object. The request for - [FirestoreAdmin.DeleteDatabase][google.firestore.admin.v1.FirestoreAdmin.DeleteDatabase]. - name (:class:`str`): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.firestore_admin_v1.types.Database` - A Cloud Firestore Database. - - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.DeleteDatabaseRequest): - request = firestore_admin.DeleteDatabaseRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.delete_database - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - database.Database, - metadata_type=firestore_admin.DeleteDatabaseMetadata, - ) - - # Done; return the response. 
- return response - - async def create_user_creds( - self, - request: Optional[Union[firestore_admin.CreateUserCredsRequest, dict]] = None, - *, - parent: Optional[str] = None, - user_creds: Optional[gfa_user_creds.UserCreds] = None, - user_creds_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gfa_user_creds.UserCreds: - r"""Create a user creds. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_create_user_creds(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.CreateUserCredsRequest( - parent="parent_value", - user_creds_id="user_creds_id_value", - ) - - # Make the request - response = await client.create_user_creds(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.CreateUserCredsRequest, dict]]): - The request object. The request for - [FirestoreAdmin.CreateUserCreds][google.firestore.admin.v1.FirestoreAdmin.CreateUserCreds]. - parent (:class:`str`): - Required. A parent name of the form - ``projects/{project_id}/databases/{database_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - user_creds (:class:`google.cloud.firestore_admin_v1.types.UserCreds`): - Required. The user creds to create. 
- This corresponds to the ``user_creds`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - user_creds_id (:class:`str`): - Required. The ID to use for the user creds, which will - become the final component of the user creds's resource - name. - - This value should be 4-63 characters. Valid characters - are /[a-z][0-9]-/ with first character a letter and the - last a letter or a number. Must not be UUID-like - /[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}/. - - This corresponds to the ``user_creds_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.UserCreds: - A Cloud Firestore User Creds. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, user_creds, user_creds_id] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, firestore_admin.CreateUserCredsRequest): - request = firestore_admin.CreateUserCredsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if user_creds is not None: - request.user_creds = user_creds - if user_creds_id is not None: - request.user_creds_id = user_creds_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.create_user_creds - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_user_creds( - self, - request: Optional[Union[firestore_admin.GetUserCredsRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> user_creds.UserCreds: - r"""Gets a user creds resource. Note that the returned - resource does not contain the secret value itself. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_get_user_creds(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetUserCredsRequest( - name="name_value", - ) - - # Make the request - response = await client.get_user_creds(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.GetUserCredsRequest, dict]]): - The request object. The request for - [FirestoreAdmin.GetUserCreds][google.firestore.admin.v1.FirestoreAdmin.GetUserCreds]. - name (:class:`str`): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}/userCreds/{user_creds_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.UserCreds: - A Cloud Firestore User Creds. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.GetUserCredsRequest): - request = firestore_admin.GetUserCredsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_user_creds - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_user_creds( - self, - request: Optional[Union[firestore_admin.ListUserCredsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> firestore_admin.ListUserCredsResponse: - r"""List all user creds in the database. Note that the - returned resource does not contain the secret value - itself. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_list_user_creds(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListUserCredsRequest( - parent="parent_value", - ) - - # Make the request - response = await client.list_user_creds(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.ListUserCredsRequest, dict]]): - The request object. The request for - [FirestoreAdmin.ListUserCreds][google.firestore.admin.v1.FirestoreAdmin.ListUserCreds]. - parent (:class:`str`): - Required. A parent database name of the form - ``projects/{project_id}/databases/{database_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.ListUserCredsResponse: - The response for - [FirestoreAdmin.ListUserCreds][google.firestore.admin.v1.FirestoreAdmin.ListUserCreds]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [parent] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.ListUserCredsRequest): - request = firestore_admin.ListUserCredsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.list_user_creds - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def enable_user_creds( - self, - request: Optional[Union[firestore_admin.EnableUserCredsRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> user_creds.UserCreds: - r"""Enables a user creds. No-op if the user creds are - already enabled. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_enable_user_creds(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.EnableUserCredsRequest( - name="name_value", - ) - - # Make the request - response = await client.enable_user_creds(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.EnableUserCredsRequest, dict]]): - The request object. The request for - [FirestoreAdmin.EnableUserCreds][google.firestore.admin.v1.FirestoreAdmin.EnableUserCreds]. - name (:class:`str`): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}/userCreds/{user_creds_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.UserCreds: - A Cloud Firestore User Creds. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.EnableUserCredsRequest): - request = firestore_admin.EnableUserCredsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.enable_user_creds - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def disable_user_creds( - self, - request: Optional[Union[firestore_admin.DisableUserCredsRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> user_creds.UserCreds: - r"""Disables a user creds. No-op if the user creds are - already disabled. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_disable_user_creds(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.DisableUserCredsRequest( - name="name_value", - ) - - # Make the request - response = await client.disable_user_creds(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.DisableUserCredsRequest, dict]]): - The request object. The request for - [FirestoreAdmin.DisableUserCreds][google.firestore.admin.v1.FirestoreAdmin.DisableUserCreds]. - name (:class:`str`): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}/userCreds/{user_creds_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.UserCreds: - A Cloud Firestore User Creds. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.DisableUserCredsRequest): - request = firestore_admin.DisableUserCredsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.disable_user_creds - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def reset_user_password( - self, - request: Optional[Union[firestore_admin.ResetUserPasswordRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> user_creds.UserCreds: - r"""Resets the password of a user creds. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_reset_user_password(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ResetUserPasswordRequest( - name="name_value", - ) - - # Make the request - response = await client.reset_user_password(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.ResetUserPasswordRequest, dict]]): - The request object. The request for - [FirestoreAdmin.ResetUserPassword][google.firestore.admin.v1.FirestoreAdmin.ResetUserPassword]. - name (:class:`str`): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}/userCreds/{user_creds_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.UserCreds: - A Cloud Firestore User Creds. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.ResetUserPasswordRequest): - request = firestore_admin.ResetUserPasswordRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.reset_user_password - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_user_creds( - self, - request: Optional[Union[firestore_admin.DeleteUserCredsRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a user creds. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_delete_user_creds(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.DeleteUserCredsRequest( - name="name_value", - ) - - # Make the request - await client.delete_user_creds(request=request) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.DeleteUserCredsRequest, dict]]): - The request object. The request for - [FirestoreAdmin.DeleteUserCreds][google.firestore.admin.v1.FirestoreAdmin.DeleteUserCreds]. - name (:class:`str`): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}/userCreds/{user_creds_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." 
- ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.DeleteUserCredsRequest): - request = firestore_admin.DeleteUserCredsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.delete_user_creds - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def get_backup( - self, - request: Optional[Union[firestore_admin.GetBackupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> backup.Backup: - r"""Gets information about a backup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_get_backup(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetBackupRequest( - name="name_value", - ) - - # Make the request - response = await client.get_backup(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.GetBackupRequest, dict]]): - The request object. The request for - [FirestoreAdmin.GetBackup][google.firestore.admin.v1.FirestoreAdmin.GetBackup]. - name (:class:`str`): - Required. Name of the backup to fetch. - - Format is - ``projects/{project}/locations/{location}/backups/{backup}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.Backup: - A Backup of a Cloud Firestore - Database. - The backup contains all documents and - index configurations for the given - database at a specific point in time. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.GetBackupRequest): - request = firestore_admin.GetBackupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_backup - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_backups( - self, - request: Optional[Union[firestore_admin.ListBackupsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> firestore_admin.ListBackupsResponse: - r"""Lists all the backups. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_list_backups(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListBackupsRequest( - parent="parent_value", - ) - - # Make the request - response = await client.list_backups(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.ListBackupsRequest, dict]]): - The request object. The request for - [FirestoreAdmin.ListBackups][google.firestore.admin.v1.FirestoreAdmin.ListBackups]. - parent (:class:`str`): - Required. The location to list backups from. - - Format is ``projects/{project}/locations/{location}``. - Use ``{location} = '-'`` to list backups from all - locations for the given project. This allows listing - backups from a single location or from all locations. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.ListBackupsResponse: - The response for - [FirestoreAdmin.ListBackups][google.firestore.admin.v1.FirestoreAdmin.ListBackups]. - - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.ListBackupsRequest): - request = firestore_admin.ListBackupsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.list_backups - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_backup( - self, - request: Optional[Union[firestore_admin.DeleteBackupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a backup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_delete_backup(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.DeleteBackupRequest( - name="name_value", - ) - - # Make the request - await client.delete_backup(request=request) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.DeleteBackupRequest, dict]]): - The request object. The request for - [FirestoreAdmin.DeleteBackup][google.firestore.admin.v1.FirestoreAdmin.DeleteBackup]. - name (:class:`str`): - Required. Name of the backup to delete. - - format is - ``projects/{project}/locations/{location}/backups/{backup}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.DeleteBackupRequest): - request = firestore_admin.DeleteBackupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.delete_backup - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def restore_database( - self, - request: Optional[Union[firestore_admin.RestoreDatabaseRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a new database by restoring from an existing backup. - - The new database must be in the same cloud region or - multi-region location as the existing backup. 
This behaves - similar to - [FirestoreAdmin.CreateDatabase][google.firestore.admin.v1.FirestoreAdmin.CreateDatabase] - except instead of creating a new empty database, a new database - is created with the database type, index configuration, and - documents from an existing backup. - - The [long-running operation][google.longrunning.Operation] can - be used to track the progress of the restore, with the - Operation's [metadata][google.longrunning.Operation.metadata] - field type being the - [RestoreDatabaseMetadata][google.firestore.admin.v1.RestoreDatabaseMetadata]. - The [response][google.longrunning.Operation.response] type is - the [Database][google.firestore.admin.v1.Database] if the - restore was successful. The new database is not readable or - writeable until the LRO has completed. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_restore_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.RestoreDatabaseRequest( - parent="parent_value", - database_id="database_id_value", - backup="backup_value", - ) - - # Make the request - operation = client.restore_database(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.RestoreDatabaseRequest, dict]]): - The request object. 
The request message for - [FirestoreAdmin.RestoreDatabase][google.firestore.admin.v1.FirestoreAdmin.RestoreDatabase]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.firestore_admin_v1.types.Database` - A Cloud Firestore Database. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.RestoreDatabaseRequest): - request = firestore_admin.RestoreDatabaseRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.restore_database - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - database.Database, - metadata_type=gfa_operation.RestoreDatabaseMetadata, - ) - - # Done; return the response. 
- return response - - async def create_backup_schedule( - self, - request: Optional[ - Union[firestore_admin.CreateBackupScheduleRequest, dict] - ] = None, - *, - parent: Optional[str] = None, - backup_schedule: Optional[schedule.BackupSchedule] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> schedule.BackupSchedule: - r"""Creates a backup schedule on a database. - At most two backup schedules can be configured on a - database, one daily backup schedule and one weekly - backup schedule. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_create_backup_schedule(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.CreateBackupScheduleRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_backup_schedule(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.CreateBackupScheduleRequest, dict]]): - The request object. The request for - [FirestoreAdmin.CreateBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.CreateBackupSchedule]. - parent (:class:`str`): - Required. The parent database. - - Format ``projects/{project}/databases/{database}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- backup_schedule (:class:`google.cloud.firestore_admin_v1.types.BackupSchedule`): - Required. The backup schedule to - create. - - This corresponds to the ``backup_schedule`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.BackupSchedule: - A backup schedule for a Cloud - Firestore Database. - This resource is owned by the database - it is backing up, and is deleted along - with the database. The actual backups - are not though. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, backup_schedule] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.CreateBackupScheduleRequest): - request = firestore_admin.CreateBackupScheduleRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - if backup_schedule is not None: - request.backup_schedule = backup_schedule - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.create_backup_schedule - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_backup_schedule( - self, - request: Optional[Union[firestore_admin.GetBackupScheduleRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> schedule.BackupSchedule: - r"""Gets information about a backup schedule. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_get_backup_schedule(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetBackupScheduleRequest( - name="name_value", - ) - - # Make the request - response = await client.get_backup_schedule(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.GetBackupScheduleRequest, dict]]): - The request object. The request for - [FirestoreAdmin.GetBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.GetBackupSchedule]. - name (:class:`str`): - Required. The name of the backup schedule. - - Format - ``projects/{project}/databases/{database}/backupSchedules/{backup_schedule}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.BackupSchedule: - A backup schedule for a Cloud - Firestore Database. - This resource is owned by the database - it is backing up, and is deleted along - with the database. The actual backups - are not though. - - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.GetBackupScheduleRequest): - request = firestore_admin.GetBackupScheduleRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_backup_schedule - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_backup_schedules( - self, - request: Optional[ - Union[firestore_admin.ListBackupSchedulesRequest, dict] - ] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> firestore_admin.ListBackupSchedulesResponse: - r"""List backup schedules. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_list_backup_schedules(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListBackupSchedulesRequest( - parent="parent_value", - ) - - # Make the request - response = await client.list_backup_schedules(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.ListBackupSchedulesRequest, dict]]): - The request object. The request for - [FirestoreAdmin.ListBackupSchedules][google.firestore.admin.v1.FirestoreAdmin.ListBackupSchedules]. - parent (:class:`str`): - Required. The parent database. - - Format is ``projects/{project}/databases/{database}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.ListBackupSchedulesResponse: - The response for - [FirestoreAdmin.ListBackupSchedules][google.firestore.admin.v1.FirestoreAdmin.ListBackupSchedules]. 
- - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.ListBackupSchedulesRequest): - request = firestore_admin.ListBackupSchedulesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.list_backup_schedules - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def update_backup_schedule( - self, - request: Optional[ - Union[firestore_admin.UpdateBackupScheduleRequest, dict] - ] = None, - *, - backup_schedule: Optional[schedule.BackupSchedule] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> schedule.BackupSchedule: - r"""Updates a backup schedule. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_update_backup_schedule(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.UpdateBackupScheduleRequest( - ) - - # Make the request - response = await client.update_backup_schedule(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.UpdateBackupScheduleRequest, dict]]): - The request object. The request for - [FirestoreAdmin.UpdateBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.UpdateBackupSchedule]. - backup_schedule (:class:`google.cloud.firestore_admin_v1.types.BackupSchedule`): - Required. The backup schedule to - update. - - This corresponds to the ``backup_schedule`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The list of fields to be updated. 
- This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.BackupSchedule: - A backup schedule for a Cloud - Firestore Database. - This resource is owned by the database - it is backing up, and is deleted along - with the database. The actual backups - are not though. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [backup_schedule, update_mask] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.UpdateBackupScheduleRequest): - request = firestore_admin.UpdateBackupScheduleRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if backup_schedule is not None: - request.backup_schedule = backup_schedule - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[ - self._client._transport.update_backup_schedule - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("backup_schedule.name", request.backup_schedule.name),) - ), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_backup_schedule( - self, - request: Optional[ - Union[firestore_admin.DeleteBackupScheduleRequest, dict] - ] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a backup schedule. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_delete_backup_schedule(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - request = firestore_admin_v1.DeleteBackupScheduleRequest( - name="name_value", - ) - - # Make the request - await client.delete_backup_schedule(request=request) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.DeleteBackupScheduleRequest, dict]]): - The request object. The request for - [FirestoreAdmin.DeleteBackupSchedules][]. - name (:class:`str`): - Required. 
The name of the backup schedule. - - Format - ``projects/{project}/databases/{database}/backupSchedules/{backup_schedule}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.DeleteBackupScheduleRequest): - request = firestore_admin.DeleteBackupScheduleRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.delete_backup_schedule - ] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def clone_database( - self, - request: Optional[Union[firestore_admin.CloneDatabaseRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a new database by cloning an existing one. - - The new database must be in the same cloud region or - multi-region location as the existing database. This behaves - similar to - [FirestoreAdmin.CreateDatabase][google.firestore.admin.v1.FirestoreAdmin.CreateDatabase] - except instead of creating a new empty database, a new database - is created with the database type, index configuration, and - documents from an existing database. - - The [long-running operation][google.longrunning.Operation] can - be used to track the progress of the clone, with the Operation's - [metadata][google.longrunning.Operation.metadata] field type - being the - [CloneDatabaseMetadata][google.firestore.admin.v1.CloneDatabaseMetadata]. - The [response][google.longrunning.Operation.response] type is - the [Database][google.firestore.admin.v1.Database] if the clone - was successful. The new database is not readable or writeable - until the LRO has completed. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - async def sample_clone_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminAsyncClient() - - # Initialize request argument(s) - pitr_snapshot = firestore_admin_v1.PitrSnapshot() - pitr_snapshot.database = "database_value" - - request = firestore_admin_v1.CloneDatabaseRequest( - parent="parent_value", - database_id="database_id_value", - pitr_snapshot=pitr_snapshot, - ) - - # Make the request - operation = client.clone_database(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_admin_v1.types.CloneDatabaseRequest, dict]]): - The request object. The request message for - [FirestoreAdmin.CloneDatabase][google.firestore.admin.v1.FirestoreAdmin.CloneDatabase]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.firestore_admin_v1.types.Database` - A Cloud Firestore Database. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, firestore_admin.CloneDatabaseRequest): - request = firestore_admin.CloneDatabaseRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.clone_database - ] - - header_params = {} - - routing_param_regex = re.compile("^projects/(?P[^/]+)(?:/.*)?$") - regex_match = routing_param_regex.match(request.pitr_snapshot.database) - if regex_match and regex_match.group("project_id"): - header_params["project_id"] = regex_match.group("project_id") - - routing_param_regex = re.compile( - "^projects/[^/]+/databases/(?P[^/]+)(?:/.*)?$" - ) - regex_match = routing_param_regex.match(request.pitr_snapshot.database) - if regex_match and regex_match.group("database_id"): - header_params["database_id"] = regex_match.group("database_id") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - database.Database, - metadata_type=gfa_operation.CloneDatabaseMetadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. 
Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. 
- - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. 
- - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def __aenter__(self) -> "FirestoreAdminAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=package_version.__version__ -) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -__all__ = ("FirestoreAdminAsyncClient",) diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/client.py b/google/cloud/firestore_admin_v1/services/firestore_admin/client.py deleted file mode 100644 index 991d58ccdc..0000000000 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/client.py +++ /dev/null @@ -1,4979 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import ( - Dict, - Callable, - Mapping, - MutableMapping, - MutableSequence, - Optional, - Sequence, - Tuple, - Type, - Union, - cast, -) -import warnings - -from google.cloud.firestore_admin_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.api_core import operation as gac_operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.firestore_admin_v1.services.firestore_admin import pagers -from google.cloud.firestore_admin_v1.types import backup -from google.cloud.firestore_admin_v1.types import database -from google.cloud.firestore_admin_v1.types import database as gfa_database -from google.cloud.firestore_admin_v1.types import field -from google.cloud.firestore_admin_v1.types import field as gfa_field -from google.cloud.firestore_admin_v1.types import firestore_admin -from 
google.cloud.firestore_admin_v1.types import index -from google.cloud.firestore_admin_v1.types import index as gfa_index -from google.cloud.firestore_admin_v1.types import operation as gfa_operation -from google.cloud.firestore_admin_v1.types import schedule -from google.cloud.firestore_admin_v1.types import user_creds -from google.cloud.firestore_admin_v1.types import user_creds as gfa_user_creds -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import FirestoreAdminGrpcTransport -from .transports.grpc_asyncio import FirestoreAdminGrpcAsyncIOTransport -from .transports.rest import FirestoreAdminRestTransport - - -class FirestoreAdminClientMeta(type): - """Metaclass for the FirestoreAdmin client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - - _transport_registry = ( - OrderedDict() - ) # type: Dict[str, Type[FirestoreAdminTransport]] - _transport_registry["grpc"] = FirestoreAdminGrpcTransport - _transport_registry["grpc_asyncio"] = FirestoreAdminGrpcAsyncIOTransport - _transport_registry["rest"] = FirestoreAdminRestTransport - - def get_transport_class( - cls, - label: Optional[str] = None, - ) -> Type[FirestoreAdminTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. 
- if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class FirestoreAdminClient(metaclass=FirestoreAdminClientMeta): - """The Cloud Firestore Admin API. - - This API provides several administrative services for Cloud - Firestore. - - Project, Database, Namespace, Collection, Collection Group, and - Document are used as defined in the Google Cloud Firestore API. - - Operation: An Operation represents work being performed in the - background. - - The index service manages Cloud Firestore indexes. - - Index creation is performed asynchronously. An Operation resource is - created for each such asynchronous operation. The state of the - operation (including any errors encountered) may be queried via the - Operation resource. - - The Operations collection provides a record of actions performed for - the specified Project (including any Operations in progress). - Operations are not created directly but through calls on other - collections or resources. - - An Operation that is done may be deleted so that it is no longer - listed as part of the Operation collection. Operations are garbage - collected after 30 days. By default, ListOperations will only return - in progress and failed operations. To list completed operation, - issue a ListOperations request with the filter ``done: true``. - - Operations are created by service ``FirestoreAdmin``, but are - accessed via service ``google.longrunning.Operations``. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. 
- """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "firestore.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "firestore.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - FirestoreAdminClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - FirestoreAdminClient: The constructed client. 
- """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> FirestoreAdminTransport: - """Returns the transport used by the client instance. - - Returns: - FirestoreAdminTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def backup_path( - project: str, - location: str, - backup: str, - ) -> str: - """Returns a fully-qualified backup string.""" - return "projects/{project}/locations/{location}/backups/{backup}".format( - project=project, - location=location, - backup=backup, - ) - - @staticmethod - def parse_backup_path(path: str) -> Dict[str, str]: - """Parses a backup path into its component segments.""" - m = re.match( - r"^projects/(?P.+?)/locations/(?P.+?)/backups/(?P.+?)$", - path, - ) - return m.groupdict() if m else {} - - @staticmethod - def backup_schedule_path( - project: str, - database: str, - backup_schedule: str, - ) -> str: - """Returns a fully-qualified backup_schedule string.""" - return "projects/{project}/databases/{database}/backupSchedules/{backup_schedule}".format( - project=project, - database=database, - backup_schedule=backup_schedule, - ) - - @staticmethod - def parse_backup_schedule_path(path: str) -> Dict[str, str]: - """Parses a backup_schedule path into its component segments.""" - m = re.match( - r"^projects/(?P.+?)/databases/(?P.+?)/backupSchedules/(?P.+?)$", - path, - ) - return m.groupdict() if m else {} - - @staticmethod - def collection_group_path( - project: str, - database: str, - collection: str, - ) -> str: - """Returns a fully-qualified collection_group string.""" - return "projects/{project}/databases/{database}/collectionGroups/{collection}".format( - project=project, - database=database, - collection=collection, - ) - - @staticmethod - def parse_collection_group_path(path: str) 
-> Dict[str, str]: - """Parses a collection_group path into its component segments.""" - m = re.match( - r"^projects/(?P<project>.+?)/databases/(?P<database>.+?)/collectionGroups/(?P<collection>.+?)$", - path, - ) - return m.groupdict() if m else {} - - @staticmethod - def database_path( - project: str, - database: str, - ) -> str: - """Returns a fully-qualified database string.""" - return "projects/{project}/databases/{database}".format( - project=project, - database=database, - ) - - @staticmethod - def parse_database_path(path: str) -> Dict[str, str]: - """Parses a database path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/databases/(?P<database>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def field_path( - project: str, - database: str, - collection: str, - field: str, - ) -> str: - """Returns a fully-qualified field string.""" - return "projects/{project}/databases/{database}/collectionGroups/{collection}/fields/{field}".format( - project=project, - database=database, - collection=collection, - field=field, - ) - - @staticmethod - def parse_field_path(path: str) -> Dict[str, str]: - """Parses a field path into its component segments.""" - m = re.match( - r"^projects/(?P<project>.+?)/databases/(?P<database>.+?)/collectionGroups/(?P<collection>.+?)/fields/(?P<field>.+?)$", - path, - ) - return m.groupdict() if m else {} - - @staticmethod - def index_path( - project: str, - database: str, - collection: str, - index: str, - ) -> str: - """Returns a fully-qualified index string.""" - return "projects/{project}/databases/{database}/collectionGroups/{collection}/indexes/{index}".format( - project=project, - database=database, - collection=collection, - index=index, - ) - - @staticmethod - def parse_index_path(path: str) -> Dict[str, str]: - """Parses a index path into its component segments.""" - m = re.match( - r"^projects/(?P<project>.+?)/databases/(?P<database>.+?)/collectionGroups/(?P<collection>.+?)/indexes/(?P<index>.+?)$", - path, - ) - return m.groupdict() if m else {} - - @staticmethod - def location_path( - project: str, -
location: str, - ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - - @staticmethod - def parse_location_path(path: str) -> Dict[str, str]: - """Parses a location path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def operation_path( - project: str, - database: str, - operation: str, - ) -> str: - """Returns a fully-qualified operation string.""" - return "projects/{project}/databases/{database}/operations/{operation}".format( - project=project, - database=database, - operation=operation, - ) - - @staticmethod - def parse_operation_path(path: str) -> Dict[str, str]: - """Parses a operation path into its component segments.""" - m = re.match( - r"^projects/(?P<project>.+?)/databases/(?P<database>.+?)/operations/(?P<operation>.+?)$", - path, - ) - return m.groupdict() if m else {} - - @staticmethod - def user_creds_path( - project: str, - database: str, - user_creds: str, - ) -> str: - """Returns a fully-qualified user_creds string.""" - return "projects/{project}/databases/{database}/userCreds/{user_creds}".format( - project=project, - database=database, - user_creds=user_creds, - ) - - @staticmethod - def parse_user_creds_path(path: str) -> Dict[str, str]: - """Parses a user_creds path into its component segments.""" - m = re.match( - r"^projects/(?P<project>.+?)/databases/(?P<database>.+?)/userCreds/(?P<user_creds>.+?)$", - path, - ) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path( - billing_account: str, - ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str, str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) -
return m.groupdict() if m else {} - - @staticmethod - def common_folder_path( - folder: str, - ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format( - folder=folder, - ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str, str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P<folder>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path( - organization: str, - ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format( - organization=organization, - ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str, str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P<organization>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path( - project: str, - ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format( - project=project, - ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str, str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path( - project: str, - location: str, - ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str, str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source( - cls, client_options: Optional[client_options_lib.ClientOptions] = None - ): - """Deprecated. Return the API endpoint and client cert source for mutual TLS.
- - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn( - "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", - DeprecationWarning, - ) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError( - "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - ) - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or ( - use_mtls_endpoint == "auto" and client_cert_source - ): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. 
- """ - use_client_cert = os.getenv( - "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" - ).lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError( - "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - ) - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint( - api_override, client_cert_source, universe_domain, use_mtls_endpoint - ): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". 
- - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or ( - use_mtls_endpoint == "auto" and client_cert_source - ): - _default_universe = FirestoreAdminClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError( - f"mTLS is not supported in any universe other than {_default_universe}." - ) - api_endpoint = FirestoreAdminClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = FirestoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=universe_domain - ) - return api_endpoint - - @staticmethod - def _get_universe_domain( - client_universe_domain: Optional[str], universe_domain_env: Optional[str] - ) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = FirestoreAdminClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. 
- return True - - def _add_cred_info_for_auth_errors( - self, error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [ - HTTPStatus.UNAUTHORIZED, - HTTPStatus.FORBIDDEN, - HTTPStatus.NOT_FOUND, - ]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__( - self, - *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[ - Union[str, FirestoreAdminTransport, Callable[..., FirestoreAdminTransport]] - ] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the firestore admin client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. 
- transport (Optional[Union[str,FirestoreAdminTransport,Callable[..., FirestoreAdminTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the FirestoreAdminTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. 
- - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast( - client_options_lib.ClientOptions, self._client_options - ) - - universe_domain_opt = getattr(self._client_options, "universe_domain", None) - - ( - self._use_client_cert, - self._use_mtls_endpoint, - self._universe_domain_env, - ) = FirestoreAdminClient._read_environment_variables() - self._client_cert_source = FirestoreAdminClient._get_client_cert_source( - self._client_options.client_cert_source, self._use_client_cert - ) - self._universe_domain = FirestoreAdminClient._get_universe_domain( - universe_domain_opt, self._universe_domain_env - ) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError( - "client_options.api_key and credentials are mutually exclusive" - ) - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, FirestoreAdminTransport) - if transport_provided: - # transport is a FirestoreAdminTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError( - "When providing a transport instance, " - "provide its credentials directly." 
- ) - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = cast(FirestoreAdminTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = ( - self._api_endpoint - or FirestoreAdminClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint, - ) - ) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr( - google.auth._default, "get_api_key_credentials" - ): - credentials = google.auth._default.get_api_key_credentials( - api_key_value - ) - - transport_init: Union[ - Type[FirestoreAdminTransport], Callable[..., FirestoreAdminTransport] - ] = ( - FirestoreAdminClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., FirestoreAdminTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - std_logging.DEBUG - ): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.firestore.admin_v1.FirestoreAdminClient`.", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": 
getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "credentialsType": None, - }, - ) - - def create_index( - self, - request: Optional[Union[firestore_admin.CreateIndexRequest, dict]] = None, - *, - parent: Optional[str] = None, - index: Optional[gfa_index.Index] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gac_operation.Operation: - r"""Creates a composite index. This returns a - [google.longrunning.Operation][google.longrunning.Operation] - which may be used to track the status of the creation. The - metadata for the operation will be the type - [IndexOperationMetadata][google.firestore.admin.v1.IndexOperationMetadata]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_create_index(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.CreateIndexRequest( - parent="parent_value", - ) - - # Make the request - operation = client.create_index(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.CreateIndexRequest, dict]): - The request object. 
The request for - [FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex]. - parent (str): - Required. A parent name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - index (google.cloud.firestore_admin_v1.types.Index): - Required. The composite index to - create. - - This corresponds to the ``index`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.firestore_admin_v1.types.Index` Cloud Firestore indexes enable simple and complex queries against - documents in a database. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, index] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, firestore_admin.CreateIndexRequest): - request = firestore_admin.CreateIndexRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if index is not None: - request.index = index - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_index] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = gac_operation.from_gapic( - response, - self._transport.operations_client, - gfa_index.Index, - metadata_type=gfa_operation.IndexOperationMetadata, - ) - - # Done; return the response. - return response - - def list_indexes( - self, - request: Optional[Union[firestore_admin.ListIndexesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListIndexesPager: - r"""Lists composite indexes. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_list_indexes(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListIndexesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_indexes(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.ListIndexesRequest, dict]): - The request object. The request for - [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes]. - parent (str): - Required. A parent name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.services.firestore_admin.pagers.ListIndexesPager: - The response for - [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [parent] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.ListIndexesRequest): - request = firestore_admin.ListIndexesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_indexes] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListIndexesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_index( - self, - request: Optional[Union[firestore_admin.GetIndexRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> index.Index: - r"""Gets a composite index. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_get_index(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetIndexRequest( - name="name_value", - ) - - # Make the request - response = client.get_index(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.GetIndexRequest, dict]): - The request object. The request for - [FirestoreAdmin.GetIndex][google.firestore.admin.v1.FirestoreAdmin.GetIndex]. - name (str): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.Index: - Cloud Firestore indexes enable simple - and complex queries against documents in - a database. - - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.GetIndexRequest): - request = firestore_admin.GetIndexRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_index] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_index( - self, - request: Optional[Union[firestore_admin.DeleteIndexRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a composite index. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_delete_index(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.DeleteIndexRequest( - name="name_value", - ) - - # Make the request - client.delete_index(request=request) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.DeleteIndexRequest, dict]): - The request object. The request for - [FirestoreAdmin.DeleteIndex][google.firestore.admin.v1.FirestoreAdmin.DeleteIndex]. - name (str): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.DeleteIndexRequest): - request = firestore_admin.DeleteIndexRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_index] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def get_field( - self, - request: Optional[Union[firestore_admin.GetFieldRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> field.Field: - r"""Gets the metadata and configuration for a Field. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_get_field(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetFieldRequest( - name="name_value", - ) - - # Make the request - response = client.get_field(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.GetFieldRequest, dict]): - The request object. The request for - [FirestoreAdmin.GetField][google.firestore.admin.v1.FirestoreAdmin.GetField]. - name (str): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.Field: - Represents a single field in the - database. - Fields are grouped by their "Collection - Group", which represent all collections - in the database with the same ID. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.GetFieldRequest): - request = firestore_admin.GetFieldRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_field] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_field( - self, - request: Optional[Union[firestore_admin.UpdateFieldRequest, dict]] = None, - *, - field: Optional[gfa_field.Field] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gac_operation.Operation: - r"""Updates a field configuration. Currently, field updates apply - only to single field index configuration. However, calls to - [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField] - should provide a field mask to avoid changing any configuration - that the caller isn't aware of. 
The field mask should be - specified as: ``{ paths: "index_config" }``. - - This call returns a - [google.longrunning.Operation][google.longrunning.Operation] - which may be used to track the status of the field update. The - metadata for the operation will be the type - [FieldOperationMetadata][google.firestore.admin.v1.FieldOperationMetadata]. - - To configure the default field settings for the database, use - the special ``Field`` with resource name: - ``projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_update_field(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - field = firestore_admin_v1.Field() - field.name = "name_value" - - request = firestore_admin_v1.UpdateFieldRequest( - field=field, - ) - - # Make the request - operation = client.update_field(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.UpdateFieldRequest, dict]): - The request object. The request for - [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField]. - field (google.cloud.firestore_admin_v1.types.Field): - Required. The field to be updated. - This corresponds to the ``field`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.firestore_admin_v1.types.Field` - Represents a single field in the database. - - Fields are grouped by their "Collection Group", which - represent all collections in the database with the - same ID. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [field] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.UpdateFieldRequest): - request = firestore_admin.UpdateFieldRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if field is not None: - request.field = field - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_field] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("field.name", request.field.name),) - ), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = gac_operation.from_gapic( - response, - self._transport.operations_client, - gfa_field.Field, - metadata_type=gfa_operation.FieldOperationMetadata, - ) - - # Done; return the response. - return response - - def list_fields( - self, - request: Optional[Union[firestore_admin.ListFieldsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListFieldsPager: - r"""Lists the field configuration and metadata for this database. - - Currently, - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] - only supports listing fields that have been explicitly - overridden. To issue this query, call - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] - with the filter set to ``indexConfig.usesAncestorConfig:false`` - or ``ttlConfig:*``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_list_fields(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListFieldsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_fields(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.ListFieldsRequest, dict]): - The request object. The request for - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]. - parent (str): - Required. A parent name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.services.firestore_admin.pagers.ListFieldsPager: - The response for - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [parent] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.ListFieldsRequest): - request = firestore_admin.ListFieldsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_fields] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListFieldsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def export_documents( - self, - request: Optional[Union[firestore_admin.ExportDocumentsRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gac_operation.Operation: - r"""Exports a copy of all or a subset of documents from - Google Cloud Firestore to another storage system, such - as Google Cloud Storage. Recent updates to documents may - not be reflected in the export. The export occurs in the - background and its progress can be monitored and managed - via the Operation resource that is created. The output - of an export may only be used once the associated - operation is done. If an export operation is cancelled - before completion it may leave partial data behind in - Google Cloud Storage. - - For more details on export behavior and output format, - refer to: - - https://cloud.google.com/firestore/docs/manage-data/export-import - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_export_documents(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ExportDocumentsRequest( - name="name_value", - ) - - # Make the request - operation = client.export_documents(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.ExportDocumentsRequest, dict]): - The request object. The request for - [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments]. - name (str): - Required. Database to export. Should be of the form: - ``projects/{project_id}/databases/{database_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.firestore_admin_v1.types.ExportDocumentsResponse` Returned in the [google.longrunning.Operation][google.longrunning.Operation] - response field. - - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.ExportDocumentsRequest): - request = firestore_admin.ExportDocumentsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.export_documents] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = gac_operation.from_gapic( - response, - self._transport.operations_client, - gfa_operation.ExportDocumentsResponse, - metadata_type=gfa_operation.ExportDocumentsMetadata, - ) - - # Done; return the response. 
- return response - - def import_documents( - self, - request: Optional[Union[firestore_admin.ImportDocumentsRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gac_operation.Operation: - r"""Imports documents into Google Cloud Firestore. - Existing documents with the same name are overwritten. - The import occurs in the background and its progress can - be monitored and managed via the Operation resource that - is created. If an ImportDocuments operation is - cancelled, it is possible that a subset of the data has - already been imported to Cloud Firestore. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_import_documents(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ImportDocumentsRequest( - name="name_value", - ) - - # Make the request - operation = client.import_documents(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.ImportDocumentsRequest, dict]): - The request object. The request for - [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments]. - name (str): - Required. Database to import into. 
Should be of the - form: ``projects/{project_id}/databases/{database_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.ImportDocumentsRequest): - request = firestore_admin.ImportDocumentsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.import_documents] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = gac_operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=gfa_operation.ImportDocumentsMetadata, - ) - - # Done; return the response. - return response - - def bulk_delete_documents( - self, - request: Optional[ - Union[firestore_admin.BulkDeleteDocumentsRequest, dict] - ] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gac_operation.Operation: - r"""Bulk deletes a subset of documents from Google Cloud - Firestore. Documents created or updated after the - underlying system starts to process the request will not - be deleted. The bulk delete occurs in the background and - its progress can be monitored and managed via the - Operation resource that is created. - - For more details on bulk delete behavior, refer to: - - https://cloud.google.com/firestore/docs/manage-data/bulk-delete - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_bulk_delete_documents(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.BulkDeleteDocumentsRequest( - name="name_value", - ) - - # Make the request - operation = client.bulk_delete_documents(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.BulkDeleteDocumentsRequest, dict]): - The request object. The request for - [FirestoreAdmin.BulkDeleteDocuments][google.firestore.admin.v1.FirestoreAdmin.BulkDeleteDocuments]. - - When both collection_ids and namespace_ids are set, only - documents satisfying both conditions will be deleted. - - Requests with namespace_ids and collection_ids both - empty will be rejected. Please use - [FirestoreAdmin.DeleteDatabase][google.firestore.admin.v1.FirestoreAdmin.DeleteDatabase] - instead. - name (str): - Required. Database to operate. Should be of the form: - ``projects/{project_id}/databases/{database_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.firestore_admin_v1.types.BulkDeleteDocumentsResponse` The response for - [FirestoreAdmin.BulkDeleteDocuments][google.firestore.admin.v1.FirestoreAdmin.BulkDeleteDocuments]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.BulkDeleteDocumentsRequest): - request = firestore_admin.BulkDeleteDocumentsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.bulk_delete_documents] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
- response = gac_operation.from_gapic( - response, - self._transport.operations_client, - firestore_admin.BulkDeleteDocumentsResponse, - metadata_type=gfa_operation.BulkDeleteDocumentsMetadata, - ) - - # Done; return the response. - return response - - def create_database( - self, - request: Optional[Union[firestore_admin.CreateDatabaseRequest, dict]] = None, - *, - parent: Optional[str] = None, - database: Optional[gfa_database.Database] = None, - database_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gac_operation.Operation: - r"""Create a database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_create_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.CreateDatabaseRequest( - parent="parent_value", - database_id="database_id_value", - ) - - # Make the request - operation = client.create_database(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.CreateDatabaseRequest, dict]): - The request object. The request for - [FirestoreAdmin.CreateDatabase][google.firestore.admin.v1.FirestoreAdmin.CreateDatabase]. - parent (str): - Required. 
A parent name of the form - ``projects/{project_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - database (google.cloud.firestore_admin_v1.types.Database): - Required. The Database to create. - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - database_id (str): - Required. The ID to use for the database, which will - become the final component of the database's resource - name. - - This value should be 4-63 characters. Valid characters - are /[a-z][0-9]-/ with first character a letter and the - last a letter or a number. Must not be UUID-like - /[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}/. - - "(default)" database ID is also valid. - - This corresponds to the ``database_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.firestore_admin_v1.types.Database` - A Cloud Firestore Database. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [parent, database, database_id] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.CreateDatabaseRequest): - request = firestore_admin.CreateDatabaseRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if database is not None: - request.database = database - if database_id is not None: - request.database_id = database_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = gac_operation.from_gapic( - response, - self._transport.operations_client, - gfa_database.Database, - metadata_type=firestore_admin.CreateDatabaseMetadata, - ) - - # Done; return the response. 
- return response - - def get_database( - self, - request: Optional[Union[firestore_admin.GetDatabaseRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> database.Database: - r"""Gets information about a database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_get_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetDatabaseRequest( - name="name_value", - ) - - # Make the request - response = client.get_database(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.GetDatabaseRequest, dict]): - The request object. The request for - [FirestoreAdmin.GetDatabase][google.firestore.admin.v1.FirestoreAdmin.GetDatabase]. - name (str): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.Database: - A Cloud Firestore Database. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.GetDatabaseRequest): - request = firestore_admin.GetDatabaseRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def list_databases( - self, - request: Optional[Union[firestore_admin.ListDatabasesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> firestore_admin.ListDatabasesResponse: - r"""List all the databases in the project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_list_databases(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListDatabasesRequest( - parent="parent_value", - ) - - # Make the request - response = client.list_databases(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.ListDatabasesRequest, dict]): - The request object. A request to list the Firestore - Databases in all locations for a - project. - parent (str): - Required. A parent name of the form - ``projects/{project_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.ListDatabasesResponse: - The list of databases for a project. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.ListDatabasesRequest): - request = firestore_admin.ListDatabasesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_databases] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def update_database( - self, - request: Optional[Union[firestore_admin.UpdateDatabaseRequest, dict]] = None, - *, - database: Optional[gfa_database.Database] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gac_operation.Operation: - r"""Updates a database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_update_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.UpdateDatabaseRequest( - ) - - # Make the request - operation = client.update_database(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.UpdateDatabaseRequest, dict]): - The request object. The request for - [FirestoreAdmin.UpdateDatabase][google.firestore.admin.v1.FirestoreAdmin.UpdateDatabase]. - database (google.cloud.firestore_admin_v1.types.Database): - Required. The database to update. - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to be updated. 
- This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.firestore_admin_v1.types.Database` - A Cloud Firestore Database. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database, update_mask] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.UpdateDatabaseRequest): - request = firestore_admin.UpdateDatabaseRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.update_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("database.name", request.database.name),) - ), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = gac_operation.from_gapic( - response, - self._transport.operations_client, - gfa_database.Database, - metadata_type=firestore_admin.UpdateDatabaseMetadata, - ) - - # Done; return the response. - return response - - def delete_database( - self, - request: Optional[Union[firestore_admin.DeleteDatabaseRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gac_operation.Operation: - r"""Deletes a database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_delete_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.DeleteDatabaseRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_database(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.DeleteDatabaseRequest, dict]): - The request object. The request for - [FirestoreAdmin.DeleteDatabase][google.firestore.admin.v1.FirestoreAdmin.DeleteDatabase]. - name (str): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.firestore_admin_v1.types.Database` - A Cloud Firestore Database. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.DeleteDatabaseRequest): - request = firestore_admin.DeleteDatabaseRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = gac_operation.from_gapic( - response, - self._transport.operations_client, - database.Database, - metadata_type=firestore_admin.DeleteDatabaseMetadata, - ) - - # Done; return the response. 
- return response - - def create_user_creds( - self, - request: Optional[Union[firestore_admin.CreateUserCredsRequest, dict]] = None, - *, - parent: Optional[str] = None, - user_creds: Optional[gfa_user_creds.UserCreds] = None, - user_creds_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gfa_user_creds.UserCreds: - r"""Create a user creds. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_create_user_creds(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.CreateUserCredsRequest( - parent="parent_value", - user_creds_id="user_creds_id_value", - ) - - # Make the request - response = client.create_user_creds(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.CreateUserCredsRequest, dict]): - The request object. The request for - [FirestoreAdmin.CreateUserCreds][google.firestore.admin.v1.FirestoreAdmin.CreateUserCreds]. - parent (str): - Required. A parent name of the form - ``projects/{project_id}/databases/{database_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - user_creds (google.cloud.firestore_admin_v1.types.UserCreds): - Required. The user creds to create. 
- This corresponds to the ``user_creds`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - user_creds_id (str): - Required. The ID to use for the user creds, which will - become the final component of the user creds's resource - name. - - This value should be 4-63 characters. Valid characters - are /[a-z][0-9]-/ with first character a letter and the - last a letter or a number. Must not be UUID-like - /[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}/. - - This corresponds to the ``user_creds_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.UserCreds: - A Cloud Firestore User Creds. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, user_creds, user_creds_id] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, firestore_admin.CreateUserCredsRequest): - request = firestore_admin.CreateUserCredsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if user_creds is not None: - request.user_creds = user_creds - if user_creds_id is not None: - request.user_creds_id = user_creds_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_user_creds] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_user_creds( - self, - request: Optional[Union[firestore_admin.GetUserCredsRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> user_creds.UserCreds: - r"""Gets a user creds resource. Note that the returned - resource does not contain the secret value itself. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_get_user_creds(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetUserCredsRequest( - name="name_value", - ) - - # Make the request - response = client.get_user_creds(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.GetUserCredsRequest, dict]): - The request object. The request for - [FirestoreAdmin.GetUserCreds][google.firestore.admin.v1.FirestoreAdmin.GetUserCreds]. - name (str): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}/userCreds/{user_creds_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.UserCreds: - A Cloud Firestore User Creds. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.GetUserCredsRequest): - request = firestore_admin.GetUserCredsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_user_creds] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_user_creds( - self, - request: Optional[Union[firestore_admin.ListUserCredsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> firestore_admin.ListUserCredsResponse: - r"""List all user creds in the database. Note that the - returned resource does not contain the secret value - itself. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_list_user_creds(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListUserCredsRequest( - parent="parent_value", - ) - - # Make the request - response = client.list_user_creds(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.ListUserCredsRequest, dict]): - The request object. The request for - [FirestoreAdmin.ListUserCreds][google.firestore.admin.v1.FirestoreAdmin.ListUserCreds]. - parent (str): - Required. A parent database name of the form - ``projects/{project_id}/databases/{database_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.ListUserCredsResponse: - The response for - [FirestoreAdmin.ListUserCreds][google.firestore.admin.v1.FirestoreAdmin.ListUserCreds]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [parent] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.ListUserCredsRequest): - request = firestore_admin.ListUserCredsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_user_creds] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def enable_user_creds( - self, - request: Optional[Union[firestore_admin.EnableUserCredsRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> user_creds.UserCreds: - r"""Enables a user creds. No-op if the user creds are - already enabled. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_enable_user_creds(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.EnableUserCredsRequest( - name="name_value", - ) - - # Make the request - response = client.enable_user_creds(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.EnableUserCredsRequest, dict]): - The request object. The request for - [FirestoreAdmin.EnableUserCreds][google.firestore.admin.v1.FirestoreAdmin.EnableUserCreds]. - name (str): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}/userCreds/{user_creds_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.UserCreds: - A Cloud Firestore User Creds. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.EnableUserCredsRequest): - request = firestore_admin.EnableUserCredsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.enable_user_creds] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def disable_user_creds( - self, - request: Optional[Union[firestore_admin.DisableUserCredsRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> user_creds.UserCreds: - r"""Disables a user creds. No-op if the user creds are - already disabled. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_disable_user_creds(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.DisableUserCredsRequest( - name="name_value", - ) - - # Make the request - response = client.disable_user_creds(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.DisableUserCredsRequest, dict]): - The request object. The request for - [FirestoreAdmin.DisableUserCreds][google.firestore.admin.v1.FirestoreAdmin.DisableUserCreds]. - name (str): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}/userCreds/{user_creds_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.UserCreds: - A Cloud Firestore User Creds. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.DisableUserCredsRequest): - request = firestore_admin.DisableUserCredsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.disable_user_creds] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def reset_user_password( - self, - request: Optional[Union[firestore_admin.ResetUserPasswordRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> user_creds.UserCreds: - r"""Resets the password of a user creds. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_reset_user_password(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ResetUserPasswordRequest( - name="name_value", - ) - - # Make the request - response = client.reset_user_password(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.ResetUserPasswordRequest, dict]): - The request object. The request for - [FirestoreAdmin.ResetUserPassword][google.firestore.admin.v1.FirestoreAdmin.ResetUserPassword]. - name (str): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}/userCreds/{user_creds_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.UserCreds: - A Cloud Firestore User Creds. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.ResetUserPasswordRequest): - request = firestore_admin.ResetUserPasswordRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.reset_user_password] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_user_creds( - self, - request: Optional[Union[firestore_admin.DeleteUserCredsRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a user creds. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_delete_user_creds(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.DeleteUserCredsRequest( - name="name_value", - ) - - # Make the request - client.delete_user_creds(request=request) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.DeleteUserCredsRequest, dict]): - The request object. The request for - [FirestoreAdmin.DeleteUserCreds][google.firestore.admin.v1.FirestoreAdmin.DeleteUserCreds]. - name (str): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}/userCreds/{user_creds_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." 
- ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.DeleteUserCredsRequest): - request = firestore_admin.DeleteUserCredsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_user_creds] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def get_backup( - self, - request: Optional[Union[firestore_admin.GetBackupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> backup.Backup: - r"""Gets information about a backup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_get_backup(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetBackupRequest( - name="name_value", - ) - - # Make the request - response = client.get_backup(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.GetBackupRequest, dict]): - The request object. The request for - [FirestoreAdmin.GetBackup][google.firestore.admin.v1.FirestoreAdmin.GetBackup]. - name (str): - Required. Name of the backup to fetch. - - Format is - ``projects/{project}/locations/{location}/backups/{backup}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.Backup: - A Backup of a Cloud Firestore - Database. - The backup contains all documents and - index configurations for the given - database at a specific point in time. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.GetBackupRequest): - request = firestore_admin.GetBackupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_backups( - self, - request: Optional[Union[firestore_admin.ListBackupsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> firestore_admin.ListBackupsResponse: - r"""Lists all the backups. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_list_backups(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListBackupsRequest( - parent="parent_value", - ) - - # Make the request - response = client.list_backups(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.ListBackupsRequest, dict]): - The request object. The request for - [FirestoreAdmin.ListBackups][google.firestore.admin.v1.FirestoreAdmin.ListBackups]. - parent (str): - Required. The location to list backups from. - - Format is ``projects/{project}/locations/{location}``. - Use ``{location} = '-'`` to list backups from all - locations for the given project. This allows listing - backups from a single location or from all locations. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.ListBackupsResponse: - The response for - [FirestoreAdmin.ListBackups][google.firestore.admin.v1.FirestoreAdmin.ListBackups]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [parent] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.ListBackupsRequest): - request = firestore_admin.ListBackupsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_backups] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_backup( - self, - request: Optional[Union[firestore_admin.DeleteBackupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a backup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_delete_backup(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.DeleteBackupRequest( - name="name_value", - ) - - # Make the request - client.delete_backup(request=request) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.DeleteBackupRequest, dict]): - The request object. The request for - [FirestoreAdmin.DeleteBackup][google.firestore.admin.v1.FirestoreAdmin.DeleteBackup]. - name (str): - Required. Name of the backup to delete. - - format is - ``projects/{project}/locations/{location}/backups/{backup}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." 
- ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.DeleteBackupRequest): - request = firestore_admin.DeleteBackupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def restore_database( - self, - request: Optional[Union[firestore_admin.RestoreDatabaseRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gac_operation.Operation: - r"""Creates a new database by restoring from an existing backup. - - The new database must be in the same cloud region or - multi-region location as the existing backup. This behaves - similar to - [FirestoreAdmin.CreateDatabase][google.firestore.admin.v1.FirestoreAdmin.CreateDatabase] - except instead of creating a new empty database, a new database - is created with the database type, index configuration, and - documents from an existing backup. 
- - The [long-running operation][google.longrunning.Operation] can - be used to track the progress of the restore, with the - Operation's [metadata][google.longrunning.Operation.metadata] - field type being the - [RestoreDatabaseMetadata][google.firestore.admin.v1.RestoreDatabaseMetadata]. - The [response][google.longrunning.Operation.response] type is - the [Database][google.firestore.admin.v1.Database] if the - restore was successful. The new database is not readable or - writeable until the LRO has completed. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_restore_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.RestoreDatabaseRequest( - parent="parent_value", - database_id="database_id_value", - backup="backup_value", - ) - - # Make the request - operation = client.restore_database(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.RestoreDatabaseRequest, dict]): - The request object. The request message for - [FirestoreAdmin.RestoreDatabase][google.firestore.admin.v1.FirestoreAdmin.RestoreDatabase]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.firestore_admin_v1.types.Database` - A Cloud Firestore Database. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.RestoreDatabaseRequest): - request = firestore_admin.RestoreDatabaseRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.restore_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = gac_operation.from_gapic( - response, - self._transport.operations_client, - database.Database, - metadata_type=gfa_operation.RestoreDatabaseMetadata, - ) - - # Done; return the response. 
- return response - - def create_backup_schedule( - self, - request: Optional[ - Union[firestore_admin.CreateBackupScheduleRequest, dict] - ] = None, - *, - parent: Optional[str] = None, - backup_schedule: Optional[schedule.BackupSchedule] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> schedule.BackupSchedule: - r"""Creates a backup schedule on a database. - At most two backup schedules can be configured on a - database, one daily backup schedule and one weekly - backup schedule. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_create_backup_schedule(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.CreateBackupScheduleRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_backup_schedule(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.CreateBackupScheduleRequest, dict]): - The request object. The request for - [FirestoreAdmin.CreateBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.CreateBackupSchedule]. - parent (str): - Required. The parent database. - - Format ``projects/{project}/databases/{database}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- backup_schedule (google.cloud.firestore_admin_v1.types.BackupSchedule): - Required. The backup schedule to - create. - - This corresponds to the ``backup_schedule`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.BackupSchedule: - A backup schedule for a Cloud - Firestore Database. - This resource is owned by the database - it is backing up, and is deleted along - with the database. The actual backups - are not though. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, backup_schedule] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.CreateBackupScheduleRequest): - request = firestore_admin.CreateBackupScheduleRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - if backup_schedule is not None: - request.backup_schedule = backup_schedule - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_backup_schedule] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_backup_schedule( - self, - request: Optional[Union[firestore_admin.GetBackupScheduleRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> schedule.BackupSchedule: - r"""Gets information about a backup schedule. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_get_backup_schedule(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.GetBackupScheduleRequest( - name="name_value", - ) - - # Make the request - response = client.get_backup_schedule(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.GetBackupScheduleRequest, dict]): - The request object. The request for - [FirestoreAdmin.GetBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.GetBackupSchedule]. - name (str): - Required. The name of the backup schedule. - - Format - ``projects/{project}/databases/{database}/backupSchedules/{backup_schedule}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.BackupSchedule: - A backup schedule for a Cloud - Firestore Database. - This resource is owned by the database - it is backing up, and is deleted along - with the database. The actual backups - are not though. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.GetBackupScheduleRequest): - request = firestore_admin.GetBackupScheduleRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_backup_schedule] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_backup_schedules( - self, - request: Optional[ - Union[firestore_admin.ListBackupSchedulesRequest, dict] - ] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> firestore_admin.ListBackupSchedulesResponse: - r"""List backup schedules. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_list_backup_schedules(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.ListBackupSchedulesRequest( - parent="parent_value", - ) - - # Make the request - response = client.list_backup_schedules(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.ListBackupSchedulesRequest, dict]): - The request object. The request for - [FirestoreAdmin.ListBackupSchedules][google.firestore.admin.v1.FirestoreAdmin.ListBackupSchedules]. - parent (str): - Required. The parent database. - - Format is ``projects/{project}/databases/{database}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.ListBackupSchedulesResponse: - The response for - [FirestoreAdmin.ListBackupSchedules][google.firestore.admin.v1.FirestoreAdmin.ListBackupSchedules]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [parent] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.ListBackupSchedulesRequest): - request = firestore_admin.ListBackupSchedulesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_backup_schedules] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_backup_schedule( - self, - request: Optional[ - Union[firestore_admin.UpdateBackupScheduleRequest, dict] - ] = None, - *, - backup_schedule: Optional[schedule.BackupSchedule] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> schedule.BackupSchedule: - r"""Updates a backup schedule. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_update_backup_schedule(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.UpdateBackupScheduleRequest( - ) - - # Make the request - response = client.update_backup_schedule(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.UpdateBackupScheduleRequest, dict]): - The request object. The request for - [FirestoreAdmin.UpdateBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.UpdateBackupSchedule]. - backup_schedule (google.cloud.firestore_admin_v1.types.BackupSchedule): - Required. The backup schedule to - update. - - This corresponds to the ``backup_schedule`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to be updated. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_admin_v1.types.BackupSchedule: - A backup schedule for a Cloud - Firestore Database. 
- This resource is owned by the database - it is backing up, and is deleted along - with the database. The actual backups - are not though. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [backup_schedule, update_mask] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.UpdateBackupScheduleRequest): - request = firestore_admin.UpdateBackupScheduleRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if backup_schedule is not None: - request.backup_schedule = backup_schedule - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_backup_schedule] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("backup_schedule.name", request.backup_schedule.name),) - ), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def delete_backup_schedule( - self, - request: Optional[ - Union[firestore_admin.DeleteBackupScheduleRequest, dict] - ] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a backup schedule. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_delete_backup_schedule(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - request = firestore_admin_v1.DeleteBackupScheduleRequest( - name="name_value", - ) - - # Make the request - client.delete_backup_schedule(request=request) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.DeleteBackupScheduleRequest, dict]): - The request object. The request for - [FirestoreAdmin.DeleteBackupSchedules][]. - name (str): - Required. The name of the backup schedule. - - Format - ``projects/{project}/databases/{database}/backupSchedules/{backup_schedule}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore_admin.DeleteBackupScheduleRequest): - request = firestore_admin.DeleteBackupScheduleRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_backup_schedule] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def clone_database( - self, - request: Optional[Union[firestore_admin.CloneDatabaseRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gac_operation.Operation: - r"""Creates a new database by cloning an existing one. 
- - The new database must be in the same cloud region or - multi-region location as the existing database. This behaves - similar to - [FirestoreAdmin.CreateDatabase][google.firestore.admin.v1.FirestoreAdmin.CreateDatabase] - except instead of creating a new empty database, a new database - is created with the database type, index configuration, and - documents from an existing database. - - The [long-running operation][google.longrunning.Operation] can - be used to track the progress of the clone, with the Operation's - [metadata][google.longrunning.Operation.metadata] field type - being the - [CloneDatabaseMetadata][google.firestore.admin.v1.CloneDatabaseMetadata]. - The [response][google.longrunning.Operation.response] type is - the [Database][google.firestore.admin.v1.Database] if the clone - was successful. The new database is not readable or writeable - until the LRO has completed. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_admin_v1 - - def sample_clone_database(): - # Create a client - client = firestore_admin_v1.FirestoreAdminClient() - - # Initialize request argument(s) - pitr_snapshot = firestore_admin_v1.PitrSnapshot() - pitr_snapshot.database = "database_value" - - request = firestore_admin_v1.CloneDatabaseRequest( - parent="parent_value", - database_id="database_id_value", - pitr_snapshot=pitr_snapshot, - ) - - # Make the request - operation = client.clone_database(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_admin_v1.types.CloneDatabaseRequest, dict]): - The request object. The request message for - [FirestoreAdmin.CloneDatabase][google.firestore.admin.v1.FirestoreAdmin.CloneDatabase]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.firestore_admin_v1.types.Database` - A Cloud Firestore Database. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
-        if not isinstance(request, firestore_admin.CloneDatabaseRequest):
-            request = firestore_admin.CloneDatabaseRequest(request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.clone_database]
-
-        header_params = {}
-
-        routing_param_regex = re.compile("^projects/(?P<project_id>[^/]+)(?:/.*)?$")
-        regex_match = routing_param_regex.match(request.pitr_snapshot.database)
-        if regex_match and regex_match.group("project_id"):
-            header_params["project_id"] = regex_match.group("project_id")
-
-        routing_param_regex = re.compile(
-            "^projects/[^/]+/databases/(?P<database_id>[^/]+)(?:/.*)?$"
-        )
-        regex_match = routing_param_regex.match(request.pitr_snapshot.database)
-        if regex_match and regex_match.group("database_id"):
-            header_params["database_id"] = regex_match.group("database_id")
-
-        if header_params:
-            metadata = tuple(metadata) + (
-                gapic_v1.routing_header.to_grpc_metadata(header_params),
-            )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = gac_operation.from_gapic(
-            response,
-            self._transport.operations_client,
-            database.Database,
-            metadata_type=gfa_operation.CloneDatabaseMetadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def __enter__(self) -> "FirestoreAdminClient":
-        return self
-
-    def __exit__(self, type, value, traceback):
-        """Releases underlying transport's resources.
-
-        .. warning::
-            ONLY use as a context manager if the transport is NOT shared
-            with other clients! Exiting the with block will CLOSE the transport
-            and may cause errors in other clients!
- """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=package_version.__version__ -) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - -__all__ = ("FirestoreAdminClient",) diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py b/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py deleted file mode 100644 index ee8737c19e..0000000000 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py +++ /dev/null @@ -1,354 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import ( - Any, - AsyncIterator, - Awaitable, - Callable, - Sequence, - Tuple, - Optional, - Iterator, - Union, -) - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[ - retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None - ] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.firestore_admin_v1.types import field -from google.cloud.firestore_admin_v1.types import firestore_admin -from google.cloud.firestore_admin_v1.types import index - - -class ListIndexesPager: - """A pager for iterating through ``list_indexes`` requests. - - This class thinly wraps an initial - :class:`google.cloud.firestore_admin_v1.types.ListIndexesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``indexes`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListIndexes`` requests and continue to iterate - through the ``indexes`` field on the - corresponding responses. - - All the usual :class:`google.cloud.firestore_admin_v1.types.ListIndexesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., firestore_admin.ListIndexesResponse], - request: firestore_admin.ListIndexesRequest, - response: firestore_admin.ListIndexesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = () - ): - """Instantiate the pager. 
- - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.firestore_admin_v1.types.ListIndexesRequest): - The initial request object. - response (google.cloud.firestore_admin_v1.types.ListIndexesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = firestore_admin.ListIndexesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[firestore_admin.ListIndexesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __iter__(self) -> Iterator[index.Index]: - for page in self.pages: - yield from page.indexes - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - -class ListIndexesAsyncPager: - """A pager for iterating through ``list_indexes`` requests. - - This class thinly wraps an initial - :class:`google.cloud.firestore_admin_v1.types.ListIndexesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``indexes`` field. 
- - If there are more pages, the ``__aiter__`` method will make additional - ``ListIndexes`` requests and continue to iterate - through the ``indexes`` field on the - corresponding responses. - - All the usual :class:`google.cloud.firestore_admin_v1.types.ListIndexesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., Awaitable[firestore_admin.ListIndexesResponse]], - request: firestore_admin.ListIndexesRequest, - response: firestore_admin.ListIndexesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.firestore_admin_v1.types.ListIndexesRequest): - The initial request object. - response (google.cloud.firestore_admin_v1.types.ListIndexesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = firestore_admin.ListIndexesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[firestore_admin.ListIndexesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[index.Index]: - async def async_generator(): - async for page in self.pages: - for response in page.indexes: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - -class ListFieldsPager: - """A pager for iterating through ``list_fields`` requests. - - This class thinly wraps an initial - :class:`google.cloud.firestore_admin_v1.types.ListFieldsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``fields`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListFields`` requests and continue to iterate - through the ``fields`` field on the - corresponding responses. - - All the usual :class:`google.cloud.firestore_admin_v1.types.ListFieldsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - - def __init__( - self, - method: Callable[..., firestore_admin.ListFieldsResponse], - request: firestore_admin.ListFieldsRequest, - response: firestore_admin.ListFieldsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = () - ): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.firestore_admin_v1.types.ListFieldsRequest): - The initial request object. - response (google.cloud.firestore_admin_v1.types.ListFieldsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = firestore_admin.ListFieldsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[firestore_admin.ListFieldsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __iter__(self) -> Iterator[field.Field]: - for page in self.pages: - yield from page.fields - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - -class ListFieldsAsyncPager: - """A pager for iterating through ``list_fields`` requests. - - This class thinly wraps an initial - :class:`google.cloud.firestore_admin_v1.types.ListFieldsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``fields`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListFields`` requests and continue to iterate - through the ``fields`` field on the - corresponding responses. - - All the usual :class:`google.cloud.firestore_admin_v1.types.ListFieldsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., Awaitable[firestore_admin.ListFieldsResponse]], - request: firestore_admin.ListFieldsRequest, - response: firestore_admin.ListFieldsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = () - ): - """Instantiates the pager. 
- - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.firestore_admin_v1.types.ListFieldsRequest): - The initial request object. - response (google.cloud.firestore_admin_v1.types.ListFieldsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = firestore_admin.ListFieldsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[firestore_admin.ListFieldsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[field.Field]: - async def async_generator(): - async for page in self.pages: - for response in page.fields: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/README.rst b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/README.rst deleted file mode 100644 index ffcad7a891..0000000000 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/README.rst +++ 
/dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`FirestoreAdminTransport` is the ABC for all transports. -- public child `FirestoreAdminGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `FirestoreAdminGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseFirestoreAdminRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `FirestoreAdminRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/__init__.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/__init__.py deleted file mode 100644 index 36eaee23d7..0000000000 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import FirestoreAdminTransport -from .grpc import FirestoreAdminGrpcTransport -from .grpc_asyncio import FirestoreAdminGrpcAsyncIOTransport -from .rest import FirestoreAdminRestTransport -from .rest import FirestoreAdminRestInterceptor - - -# Compile a registry of transports. 
-_transport_registry = OrderedDict() # type: Dict[str, Type[FirestoreAdminTransport]] -_transport_registry["grpc"] = FirestoreAdminGrpcTransport -_transport_registry["grpc_asyncio"] = FirestoreAdminGrpcAsyncIOTransport -_transport_registry["rest"] = FirestoreAdminRestTransport - -__all__ = ( - "FirestoreAdminTransport", - "FirestoreAdminGrpcTransport", - "FirestoreAdminGrpcAsyncIOTransport", - "FirestoreAdminRestTransport", - "FirestoreAdminRestInterceptor", -) diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py deleted file mode 100644 index 7d582d9b5a..0000000000 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py +++ /dev/null @@ -1,744 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.firestore_admin_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -from google.cloud.firestore_admin_v1.types import backup -from google.cloud.firestore_admin_v1.types import database -from google.cloud.firestore_admin_v1.types import field -from google.cloud.firestore_admin_v1.types import firestore_admin -from google.cloud.firestore_admin_v1.types import index -from google.cloud.firestore_admin_v1.types import schedule -from google.cloud.firestore_admin_v1.types import user_creds -from google.cloud.firestore_admin_v1.types import user_creds as gfa_user_creds -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=package_version.__version__ -) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class FirestoreAdminTransport(abc.ABC): - """Abstract transport class for FirestoreAdmin.""" - - AUTH_SCOPES = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", - ) - - DEFAULT_HOST: str = "firestore.googleapis.com" - - def __init__( - self, - *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - 
quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'firestore.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. This argument will be - removed in the next major version of this library. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. 
- if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs( - "'credentials_file' and 'credentials' are mutually exclusive" - ) - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, **scopes_kwargs, quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default( - **scopes_kwargs, quota_project_id=quota_project_id - ) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience( - api_audience if api_audience else host - ) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if ( - always_use_jwt_access - and isinstance(credentials, service_account.Credentials) - and hasattr(service_account.Credentials, "with_always_use_jwt_access") - ): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ":" not in host: - host += ":443" - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
- self._wrapped_methods = { - self.create_index: gapic_v1.method.wrap_method( - self.create_index, - default_timeout=60.0, - client_info=client_info, - ), - self.list_indexes: gapic_v1.method.wrap_method( - self.list_indexes, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_index: gapic_v1.method.wrap_method( - self.get_index, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.delete_index: gapic_v1.method.wrap_method( - self.delete_index, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_field: gapic_v1.method.wrap_method( - self.get_field, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.update_field: gapic_v1.method.wrap_method( - self.update_field, - default_timeout=60.0, - client_info=client_info, - ), - self.list_fields: gapic_v1.method.wrap_method( - self.list_fields, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - 
predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.export_documents: gapic_v1.method.wrap_method( - self.export_documents, - default_timeout=60.0, - client_info=client_info, - ), - self.import_documents: gapic_v1.method.wrap_method( - self.import_documents, - default_timeout=60.0, - client_info=client_info, - ), - self.bulk_delete_documents: gapic_v1.method.wrap_method( - self.bulk_delete_documents, - default_timeout=60.0, - client_info=client_info, - ), - self.create_database: gapic_v1.method.wrap_method( - self.create_database, - default_timeout=120.0, - client_info=client_info, - ), - self.get_database: gapic_v1.method.wrap_method( - self.get_database, - default_timeout=None, - client_info=client_info, - ), - self.list_databases: gapic_v1.method.wrap_method( - self.list_databases, - default_timeout=None, - client_info=client_info, - ), - self.update_database: gapic_v1.method.wrap_method( - self.update_database, - default_timeout=None, - client_info=client_info, - ), - self.delete_database: gapic_v1.method.wrap_method( - self.delete_database, - default_timeout=None, - client_info=client_info, - ), - self.create_user_creds: gapic_v1.method.wrap_method( - self.create_user_creds, - default_timeout=None, - client_info=client_info, - ), - self.get_user_creds: gapic_v1.method.wrap_method( - self.get_user_creds, - default_timeout=None, - client_info=client_info, - ), - self.list_user_creds: gapic_v1.method.wrap_method( - self.list_user_creds, - default_timeout=None, - client_info=client_info, - ), - self.enable_user_creds: gapic_v1.method.wrap_method( - self.enable_user_creds, - default_timeout=None, - client_info=client_info, - ), - self.disable_user_creds: gapic_v1.method.wrap_method( - self.disable_user_creds, - default_timeout=None, - client_info=client_info, - ), - 
self.reset_user_password: gapic_v1.method.wrap_method( - self.reset_user_password, - default_timeout=None, - client_info=client_info, - ), - self.delete_user_creds: gapic_v1.method.wrap_method( - self.delete_user_creds, - default_timeout=None, - client_info=client_info, - ), - self.get_backup: gapic_v1.method.wrap_method( - self.get_backup, - default_timeout=None, - client_info=client_info, - ), - self.list_backups: gapic_v1.method.wrap_method( - self.list_backups, - default_timeout=None, - client_info=client_info, - ), - self.delete_backup: gapic_v1.method.wrap_method( - self.delete_backup, - default_timeout=None, - client_info=client_info, - ), - self.restore_database: gapic_v1.method.wrap_method( - self.restore_database, - default_timeout=120.0, - client_info=client_info, - ), - self.create_backup_schedule: gapic_v1.method.wrap_method( - self.create_backup_schedule, - default_timeout=None, - client_info=client_info, - ), - self.get_backup_schedule: gapic_v1.method.wrap_method( - self.get_backup_schedule, - default_timeout=None, - client_info=client_info, - ), - self.list_backup_schedules: gapic_v1.method.wrap_method( - self.list_backup_schedules, - default_timeout=None, - client_info=client_info, - ), - self.update_backup_schedule: gapic_v1.method.wrap_method( - self.update_backup_schedule, - default_timeout=None, - client_info=client_info, - ), - self.delete_backup_schedule: gapic_v1.method.wrap_method( - self.delete_backup_schedule, - default_timeout=None, - client_info=client_info, - ), - self.clone_database: gapic_v1.method.wrap_method( - self.clone_database, - default_timeout=120.0, - client_info=client_info, - ), - self.cancel_operation: gapic_v1.method.wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: gapic_v1.method.wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: gapic_v1.method.wrap_method( - self.get_operation, - 
default_timeout=None, - client_info=client_info, - ), - self.list_operations: gapic_v1.method.wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def create_index( - self, - ) -> Callable[ - [firestore_admin.CreateIndexRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def list_indexes( - self, - ) -> Callable[ - [firestore_admin.ListIndexesRequest], - Union[ - firestore_admin.ListIndexesResponse, - Awaitable[firestore_admin.ListIndexesResponse], - ], - ]: - raise NotImplementedError() - - @property - def get_index( - self, - ) -> Callable[ - [firestore_admin.GetIndexRequest], Union[index.Index, Awaitable[index.Index]] - ]: - raise NotImplementedError() - - @property - def delete_index( - self, - ) -> Callable[ - [firestore_admin.DeleteIndexRequest], - Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], - ]: - raise NotImplementedError() - - @property - def get_field( - self, - ) -> Callable[ - [firestore_admin.GetFieldRequest], Union[field.Field, Awaitable[field.Field]] - ]: - raise NotImplementedError() - - @property - def update_field( - self, - ) -> Callable[ - [firestore_admin.UpdateFieldRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def list_fields( - self, - ) -> Callable[ - [firestore_admin.ListFieldsRequest], - Union[ - firestore_admin.ListFieldsResponse, - Awaitable[firestore_admin.ListFieldsResponse], - ], - ]: - raise NotImplementedError() - - 
@property - def export_documents( - self, - ) -> Callable[ - [firestore_admin.ExportDocumentsRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def import_documents( - self, - ) -> Callable[ - [firestore_admin.ImportDocumentsRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def bulk_delete_documents( - self, - ) -> Callable[ - [firestore_admin.BulkDeleteDocumentsRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def create_database( - self, - ) -> Callable[ - [firestore_admin.CreateDatabaseRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def get_database( - self, - ) -> Callable[ - [firestore_admin.GetDatabaseRequest], - Union[database.Database, Awaitable[database.Database]], - ]: - raise NotImplementedError() - - @property - def list_databases( - self, - ) -> Callable[ - [firestore_admin.ListDatabasesRequest], - Union[ - firestore_admin.ListDatabasesResponse, - Awaitable[firestore_admin.ListDatabasesResponse], - ], - ]: - raise NotImplementedError() - - @property - def update_database( - self, - ) -> Callable[ - [firestore_admin.UpdateDatabaseRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def delete_database( - self, - ) -> Callable[ - [firestore_admin.DeleteDatabaseRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def create_user_creds( - self, - ) -> Callable[ - [firestore_admin.CreateUserCredsRequest], - Union[gfa_user_creds.UserCreds, Awaitable[gfa_user_creds.UserCreds]], - ]: - raise NotImplementedError() - - @property - def get_user_creds( - self, - ) -> Callable[ - 
[firestore_admin.GetUserCredsRequest], - Union[user_creds.UserCreds, Awaitable[user_creds.UserCreds]], - ]: - raise NotImplementedError() - - @property - def list_user_creds( - self, - ) -> Callable[ - [firestore_admin.ListUserCredsRequest], - Union[ - firestore_admin.ListUserCredsResponse, - Awaitable[firestore_admin.ListUserCredsResponse], - ], - ]: - raise NotImplementedError() - - @property - def enable_user_creds( - self, - ) -> Callable[ - [firestore_admin.EnableUserCredsRequest], - Union[user_creds.UserCreds, Awaitable[user_creds.UserCreds]], - ]: - raise NotImplementedError() - - @property - def disable_user_creds( - self, - ) -> Callable[ - [firestore_admin.DisableUserCredsRequest], - Union[user_creds.UserCreds, Awaitable[user_creds.UserCreds]], - ]: - raise NotImplementedError() - - @property - def reset_user_password( - self, - ) -> Callable[ - [firestore_admin.ResetUserPasswordRequest], - Union[user_creds.UserCreds, Awaitable[user_creds.UserCreds]], - ]: - raise NotImplementedError() - - @property - def delete_user_creds( - self, - ) -> Callable[ - [firestore_admin.DeleteUserCredsRequest], - Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], - ]: - raise NotImplementedError() - - @property - def get_backup( - self, - ) -> Callable[ - [firestore_admin.GetBackupRequest], - Union[backup.Backup, Awaitable[backup.Backup]], - ]: - raise NotImplementedError() - - @property - def list_backups( - self, - ) -> Callable[ - [firestore_admin.ListBackupsRequest], - Union[ - firestore_admin.ListBackupsResponse, - Awaitable[firestore_admin.ListBackupsResponse], - ], - ]: - raise NotImplementedError() - - @property - def delete_backup( - self, - ) -> Callable[ - [firestore_admin.DeleteBackupRequest], - Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], - ]: - raise NotImplementedError() - - @property - def restore_database( - self, - ) -> Callable[ - [firestore_admin.RestoreDatabaseRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: 
- raise NotImplementedError() - - @property - def create_backup_schedule( - self, - ) -> Callable[ - [firestore_admin.CreateBackupScheduleRequest], - Union[schedule.BackupSchedule, Awaitable[schedule.BackupSchedule]], - ]: - raise NotImplementedError() - - @property - def get_backup_schedule( - self, - ) -> Callable[ - [firestore_admin.GetBackupScheduleRequest], - Union[schedule.BackupSchedule, Awaitable[schedule.BackupSchedule]], - ]: - raise NotImplementedError() - - @property - def list_backup_schedules( - self, - ) -> Callable[ - [firestore_admin.ListBackupSchedulesRequest], - Union[ - firestore_admin.ListBackupSchedulesResponse, - Awaitable[firestore_admin.ListBackupSchedulesResponse], - ], - ]: - raise NotImplementedError() - - @property - def update_backup_schedule( - self, - ) -> Callable[ - [firestore_admin.UpdateBackupScheduleRequest], - Union[schedule.BackupSchedule, Awaitable[schedule.BackupSchedule]], - ]: - raise NotImplementedError() - - @property - def delete_backup_schedule( - self, - ) -> Callable[ - [firestore_admin.DeleteBackupScheduleRequest], - Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], - ]: - raise NotImplementedError() - - @property - def clone_database( - self, - ) -> Callable[ - [firestore_admin.CloneDatabaseRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[ - operations_pb2.ListOperationsResponse, - Awaitable[operations_pb2.ListOperationsResponse], - ], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: - raise NotImplementedError() - - @property - def 
delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ("FirestoreAdminTransport",) diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py deleted file mode 100644 index f6531a1906..0000000000 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py +++ /dev/null @@ -1,1406 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.firestore_admin_v1.types import backup -from google.cloud.firestore_admin_v1.types import database -from google.cloud.firestore_admin_v1.types import field -from google.cloud.firestore_admin_v1.types import firestore_admin -from google.cloud.firestore_admin_v1.types import index -from google.cloud.firestore_admin_v1.types import schedule -from google.cloud.firestore_admin_v1.types import user_creds -from google.cloud.firestore_admin_v1.types import user_creds as gfa_user_creds -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - std_logging.DEBUG - ) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload 
= type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert gRPC metadata `` to list of tuples - metadata = ( - dict([(k, str(v)) for k, v in response_metadata]) - if response_metadata - else None - ) - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class FirestoreAdminGrpcTransport(FirestoreAdminTransport): - """gRPC backend transport for FirestoreAdmin. - - The Cloud Firestore Admin API. - - This API provides several administrative services for Cloud - Firestore. 
- - Project, Database, Namespace, Collection, Collection Group, and - Document are used as defined in the Google Cloud Firestore API. - - Operation: An Operation represents work being performed in the - background. - - The index service manages Cloud Firestore indexes. - - Index creation is performed asynchronously. An Operation resource is - created for each such asynchronous operation. The state of the - operation (including any errors encountered) may be queried via the - Operation resource. - - The Operations collection provides a record of actions performed for - the specified Project (including any Operations in progress). - Operations are not created directly but through calls on other - collections or resources. - - An Operation that is done may be deleted so that it is no longer - listed as part of the Operation collection. Operations are garbage - collected after 30 days. By default, ListOperations will only return - in progress and failed operations. To list completed operation, - issue a ListOperations request with the filter ``done: true``. - - Operations are created by service ``FirestoreAdmin``, but are - accessed via service ``google.longrunning.Operations``. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
- """ - - _stubs: Dict[str, Callable] - - def __init__( - self, - *, - host: str = "firestore.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'firestore.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - This argument will be removed in the next major version of this library. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. 
If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. 
- """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be 
used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel( - self._grpc_channel, self._interceptor - ) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel( - cls, - host: str = "firestore.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs, - ) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. This argument will be - removed in the next major version of this library. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. 
- - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs, - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service.""" - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_index( - self, - ) -> Callable[[firestore_admin.CreateIndexRequest], operations_pb2.Operation]: - r"""Return a callable for the create index method over gRPC. - - Creates a composite index. This returns a - [google.longrunning.Operation][google.longrunning.Operation] - which may be used to track the status of the creation. The - metadata for the operation will be the type - [IndexOperationMetadata][google.firestore.admin.v1.IndexOperationMetadata]. - - Returns: - Callable[[~.CreateIndexRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "create_index" not in self._stubs: - self._stubs["create_index"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/CreateIndex", - request_serializer=firestore_admin.CreateIndexRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["create_index"] - - @property - def list_indexes( - self, - ) -> Callable[ - [firestore_admin.ListIndexesRequest], firestore_admin.ListIndexesResponse - ]: - r"""Return a callable for the list indexes method over gRPC. - - Lists composite indexes. - - Returns: - Callable[[~.ListIndexesRequest], - ~.ListIndexesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_indexes" not in self._stubs: - self._stubs["list_indexes"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/ListIndexes", - request_serializer=firestore_admin.ListIndexesRequest.serialize, - response_deserializer=firestore_admin.ListIndexesResponse.deserialize, - ) - return self._stubs["list_indexes"] - - @property - def get_index(self) -> Callable[[firestore_admin.GetIndexRequest], index.Index]: - r"""Return a callable for the get index method over gRPC. - - Gets a composite index. - - Returns: - Callable[[~.GetIndexRequest], - ~.Index]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_index" not in self._stubs: - self._stubs["get_index"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/GetIndex", - request_serializer=firestore_admin.GetIndexRequest.serialize, - response_deserializer=index.Index.deserialize, - ) - return self._stubs["get_index"] - - @property - def delete_index( - self, - ) -> Callable[[firestore_admin.DeleteIndexRequest], empty_pb2.Empty]: - r"""Return a callable for the delete index method over gRPC. - - Deletes a composite index. - - Returns: - Callable[[~.DeleteIndexRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_index" not in self._stubs: - self._stubs["delete_index"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/DeleteIndex", - request_serializer=firestore_admin.DeleteIndexRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs["delete_index"] - - @property - def get_field(self) -> Callable[[firestore_admin.GetFieldRequest], field.Field]: - r"""Return a callable for the get field method over gRPC. - - Gets the metadata and configuration for a Field. - - Returns: - Callable[[~.GetFieldRequest], - ~.Field]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_field" not in self._stubs: - self._stubs["get_field"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/GetField", - request_serializer=firestore_admin.GetFieldRequest.serialize, - response_deserializer=field.Field.deserialize, - ) - return self._stubs["get_field"] - - @property - def update_field( - self, - ) -> Callable[[firestore_admin.UpdateFieldRequest], operations_pb2.Operation]: - r"""Return a callable for the update field method over gRPC. - - Updates a field configuration. Currently, field updates apply - only to single field index configuration. However, calls to - [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField] - should provide a field mask to avoid changing any configuration - that the caller isn't aware of. The field mask should be - specified as: ``{ paths: "index_config" }``. - - This call returns a - [google.longrunning.Operation][google.longrunning.Operation] - which may be used to track the status of the field update. The - metadata for the operation will be the type - [FieldOperationMetadata][google.firestore.admin.v1.FieldOperationMetadata]. - - To configure the default field settings for the database, use - the special ``Field`` with resource name: - ``projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*``. - - Returns: - Callable[[~.UpdateFieldRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "update_field" not in self._stubs: - self._stubs["update_field"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/UpdateField", - request_serializer=firestore_admin.UpdateFieldRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["update_field"] - - @property - def list_fields( - self, - ) -> Callable[ - [firestore_admin.ListFieldsRequest], firestore_admin.ListFieldsResponse - ]: - r"""Return a callable for the list fields method over gRPC. - - Lists the field configuration and metadata for this database. - - Currently, - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] - only supports listing fields that have been explicitly - overridden. To issue this query, call - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] - with the filter set to ``indexConfig.usesAncestorConfig:false`` - or ``ttlConfig:*``. - - Returns: - Callable[[~.ListFieldsRequest], - ~.ListFieldsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_fields" not in self._stubs: - self._stubs["list_fields"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/ListFields", - request_serializer=firestore_admin.ListFieldsRequest.serialize, - response_deserializer=firestore_admin.ListFieldsResponse.deserialize, - ) - return self._stubs["list_fields"] - - @property - def export_documents( - self, - ) -> Callable[[firestore_admin.ExportDocumentsRequest], operations_pb2.Operation]: - r"""Return a callable for the export documents method over gRPC. - - Exports a copy of all or a subset of documents from - Google Cloud Firestore to another storage system, such - as Google Cloud Storage. 
Recent updates to documents may - not be reflected in the export. The export occurs in the - background and its progress can be monitored and managed - via the Operation resource that is created. The output - of an export may only be used once the associated - operation is done. If an export operation is cancelled - before completion it may leave partial data behind in - Google Cloud Storage. - - For more details on export behavior and output format, - refer to: - - https://cloud.google.com/firestore/docs/manage-data/export-import - - Returns: - Callable[[~.ExportDocumentsRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "export_documents" not in self._stubs: - self._stubs["export_documents"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/ExportDocuments", - request_serializer=firestore_admin.ExportDocumentsRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["export_documents"] - - @property - def import_documents( - self, - ) -> Callable[[firestore_admin.ImportDocumentsRequest], operations_pb2.Operation]: - r"""Return a callable for the import documents method over gRPC. - - Imports documents into Google Cloud Firestore. - Existing documents with the same name are overwritten. - The import occurs in the background and its progress can - be monitored and managed via the Operation resource that - is created. If an ImportDocuments operation is - cancelled, it is possible that a subset of the data has - already been imported to Cloud Firestore. - - Returns: - Callable[[~.ImportDocumentsRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "import_documents" not in self._stubs: - self._stubs["import_documents"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/ImportDocuments", - request_serializer=firestore_admin.ImportDocumentsRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["import_documents"] - - @property - def bulk_delete_documents( - self, - ) -> Callable[ - [firestore_admin.BulkDeleteDocumentsRequest], operations_pb2.Operation - ]: - r"""Return a callable for the bulk delete documents method over gRPC. - - Bulk deletes a subset of documents from Google Cloud - Firestore. Documents created or updated after the - underlying system starts to process the request will not - be deleted. The bulk delete occurs in the background and - its progress can be monitored and managed via the - Operation resource that is created. - - For more details on bulk delete behavior, refer to: - - https://cloud.google.com/firestore/docs/manage-data/bulk-delete - - Returns: - Callable[[~.BulkDeleteDocumentsRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "bulk_delete_documents" not in self._stubs: - self._stubs["bulk_delete_documents"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/BulkDeleteDocuments", - request_serializer=firestore_admin.BulkDeleteDocumentsRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["bulk_delete_documents"] - - @property - def create_database( - self, - ) -> Callable[[firestore_admin.CreateDatabaseRequest], operations_pb2.Operation]: - r"""Return a callable for the create database method over gRPC. - - Create a database. - - Returns: - Callable[[~.CreateDatabaseRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "create_database" not in self._stubs: - self._stubs["create_database"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/CreateDatabase", - request_serializer=firestore_admin.CreateDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["create_database"] - - @property - def get_database( - self, - ) -> Callable[[firestore_admin.GetDatabaseRequest], database.Database]: - r"""Return a callable for the get database method over gRPC. - - Gets information about a database. - - Returns: - Callable[[~.GetDatabaseRequest], - ~.Database]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_database" not in self._stubs: - self._stubs["get_database"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/GetDatabase", - request_serializer=firestore_admin.GetDatabaseRequest.serialize, - response_deserializer=database.Database.deserialize, - ) - return self._stubs["get_database"] - - @property - def list_databases( - self, - ) -> Callable[ - [firestore_admin.ListDatabasesRequest], firestore_admin.ListDatabasesResponse - ]: - r"""Return a callable for the list databases method over gRPC. - - List all the databases in the project. - - Returns: - Callable[[~.ListDatabasesRequest], - ~.ListDatabasesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_databases" not in self._stubs: - self._stubs["list_databases"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/ListDatabases", - request_serializer=firestore_admin.ListDatabasesRequest.serialize, - response_deserializer=firestore_admin.ListDatabasesResponse.deserialize, - ) - return self._stubs["list_databases"] - - @property - def update_database( - self, - ) -> Callable[[firestore_admin.UpdateDatabaseRequest], operations_pb2.Operation]: - r"""Return a callable for the update database method over gRPC. - - Updates a database. - - Returns: - Callable[[~.UpdateDatabaseRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "update_database" not in self._stubs: - self._stubs["update_database"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/UpdateDatabase", - request_serializer=firestore_admin.UpdateDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["update_database"] - - @property - def delete_database( - self, - ) -> Callable[[firestore_admin.DeleteDatabaseRequest], operations_pb2.Operation]: - r"""Return a callable for the delete database method over gRPC. - - Deletes a database. - - Returns: - Callable[[~.DeleteDatabaseRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_database" not in self._stubs: - self._stubs["delete_database"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/DeleteDatabase", - request_serializer=firestore_admin.DeleteDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["delete_database"] - - @property - def create_user_creds( - self, - ) -> Callable[[firestore_admin.CreateUserCredsRequest], gfa_user_creds.UserCreds]: - r"""Return a callable for the create user creds method over gRPC. - - Create a user creds. - - Returns: - Callable[[~.CreateUserCredsRequest], - ~.UserCreds]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "create_user_creds" not in self._stubs: - self._stubs["create_user_creds"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/CreateUserCreds", - request_serializer=firestore_admin.CreateUserCredsRequest.serialize, - response_deserializer=gfa_user_creds.UserCreds.deserialize, - ) - return self._stubs["create_user_creds"] - - @property - def get_user_creds( - self, - ) -> Callable[[firestore_admin.GetUserCredsRequest], user_creds.UserCreds]: - r"""Return a callable for the get user creds method over gRPC. - - Gets a user creds resource. Note that the returned - resource does not contain the secret value itself. - - Returns: - Callable[[~.GetUserCredsRequest], - ~.UserCreds]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_user_creds" not in self._stubs: - self._stubs["get_user_creds"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/GetUserCreds", - request_serializer=firestore_admin.GetUserCredsRequest.serialize, - response_deserializer=user_creds.UserCreds.deserialize, - ) - return self._stubs["get_user_creds"] - - @property - def list_user_creds( - self, - ) -> Callable[ - [firestore_admin.ListUserCredsRequest], firestore_admin.ListUserCredsResponse - ]: - r"""Return a callable for the list user creds method over gRPC. - - List all user creds in the database. Note that the - returned resource does not contain the secret value - itself. - - Returns: - Callable[[~.ListUserCredsRequest], - ~.ListUserCredsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_user_creds" not in self._stubs: - self._stubs["list_user_creds"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/ListUserCreds", - request_serializer=firestore_admin.ListUserCredsRequest.serialize, - response_deserializer=firestore_admin.ListUserCredsResponse.deserialize, - ) - return self._stubs["list_user_creds"] - - @property - def enable_user_creds( - self, - ) -> Callable[[firestore_admin.EnableUserCredsRequest], user_creds.UserCreds]: - r"""Return a callable for the enable user creds method over gRPC. - - Enables a user creds. No-op if the user creds are - already enabled. - - Returns: - Callable[[~.EnableUserCredsRequest], - ~.UserCreds]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "enable_user_creds" not in self._stubs: - self._stubs["enable_user_creds"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/EnableUserCreds", - request_serializer=firestore_admin.EnableUserCredsRequest.serialize, - response_deserializer=user_creds.UserCreds.deserialize, - ) - return self._stubs["enable_user_creds"] - - @property - def disable_user_creds( - self, - ) -> Callable[[firestore_admin.DisableUserCredsRequest], user_creds.UserCreds]: - r"""Return a callable for the disable user creds method over gRPC. - - Disables a user creds. No-op if the user creds are - already disabled. - - Returns: - Callable[[~.DisableUserCredsRequest], - ~.UserCreds]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "disable_user_creds" not in self._stubs: - self._stubs["disable_user_creds"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/DisableUserCreds", - request_serializer=firestore_admin.DisableUserCredsRequest.serialize, - response_deserializer=user_creds.UserCreds.deserialize, - ) - return self._stubs["disable_user_creds"] - - @property - def reset_user_password( - self, - ) -> Callable[[firestore_admin.ResetUserPasswordRequest], user_creds.UserCreds]: - r"""Return a callable for the reset user password method over gRPC. - - Resets the password of a user creds. - - Returns: - Callable[[~.ResetUserPasswordRequest], - ~.UserCreds]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "reset_user_password" not in self._stubs: - self._stubs["reset_user_password"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/ResetUserPassword", - request_serializer=firestore_admin.ResetUserPasswordRequest.serialize, - response_deserializer=user_creds.UserCreds.deserialize, - ) - return self._stubs["reset_user_password"] - - @property - def delete_user_creds( - self, - ) -> Callable[[firestore_admin.DeleteUserCredsRequest], empty_pb2.Empty]: - r"""Return a callable for the delete user creds method over gRPC. - - Deletes a user creds. - - Returns: - Callable[[~.DeleteUserCredsRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "delete_user_creds" not in self._stubs: - self._stubs["delete_user_creds"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/DeleteUserCreds", - request_serializer=firestore_admin.DeleteUserCredsRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs["delete_user_creds"] - - @property - def get_backup(self) -> Callable[[firestore_admin.GetBackupRequest], backup.Backup]: - r"""Return a callable for the get backup method over gRPC. - - Gets information about a backup. - - Returns: - Callable[[~.GetBackupRequest], - ~.Backup]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_backup" not in self._stubs: - self._stubs["get_backup"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/GetBackup", - request_serializer=firestore_admin.GetBackupRequest.serialize, - response_deserializer=backup.Backup.deserialize, - ) - return self._stubs["get_backup"] - - @property - def list_backups( - self, - ) -> Callable[ - [firestore_admin.ListBackupsRequest], firestore_admin.ListBackupsResponse - ]: - r"""Return a callable for the list backups method over gRPC. - - Lists all the backups. - - Returns: - Callable[[~.ListBackupsRequest], - ~.ListBackupsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "list_backups" not in self._stubs: - self._stubs["list_backups"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/ListBackups", - request_serializer=firestore_admin.ListBackupsRequest.serialize, - response_deserializer=firestore_admin.ListBackupsResponse.deserialize, - ) - return self._stubs["list_backups"] - - @property - def delete_backup( - self, - ) -> Callable[[firestore_admin.DeleteBackupRequest], empty_pb2.Empty]: - r"""Return a callable for the delete backup method over gRPC. - - Deletes a backup. - - Returns: - Callable[[~.DeleteBackupRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_backup" not in self._stubs: - self._stubs["delete_backup"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/DeleteBackup", - request_serializer=firestore_admin.DeleteBackupRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs["delete_backup"] - - @property - def restore_database( - self, - ) -> Callable[[firestore_admin.RestoreDatabaseRequest], operations_pb2.Operation]: - r"""Return a callable for the restore database method over gRPC. - - Creates a new database by restoring from an existing backup. - - The new database must be in the same cloud region or - multi-region location as the existing backup. This behaves - similar to - [FirestoreAdmin.CreateDatabase][google.firestore.admin.v1.FirestoreAdmin.CreateDatabase] - except instead of creating a new empty database, a new database - is created with the database type, index configuration, and - documents from an existing backup. 
- - The [long-running operation][google.longrunning.Operation] can - be used to track the progress of the restore, with the - Operation's [metadata][google.longrunning.Operation.metadata] - field type being the - [RestoreDatabaseMetadata][google.firestore.admin.v1.RestoreDatabaseMetadata]. - The [response][google.longrunning.Operation.response] type is - the [Database][google.firestore.admin.v1.Database] if the - restore was successful. The new database is not readable or - writeable until the LRO has completed. - - Returns: - Callable[[~.RestoreDatabaseRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "restore_database" not in self._stubs: - self._stubs["restore_database"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/RestoreDatabase", - request_serializer=firestore_admin.RestoreDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["restore_database"] - - @property - def create_backup_schedule( - self, - ) -> Callable[ - [firestore_admin.CreateBackupScheduleRequest], schedule.BackupSchedule - ]: - r"""Return a callable for the create backup schedule method over gRPC. - - Creates a backup schedule on a database. - At most two backup schedules can be configured on a - database, one daily backup schedule and one weekly - backup schedule. - - Returns: - Callable[[~.CreateBackupScheduleRequest], - ~.BackupSchedule]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "create_backup_schedule" not in self._stubs: - self._stubs["create_backup_schedule"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/CreateBackupSchedule", - request_serializer=firestore_admin.CreateBackupScheduleRequest.serialize, - response_deserializer=schedule.BackupSchedule.deserialize, - ) - return self._stubs["create_backup_schedule"] - - @property - def get_backup_schedule( - self, - ) -> Callable[[firestore_admin.GetBackupScheduleRequest], schedule.BackupSchedule]: - r"""Return a callable for the get backup schedule method over gRPC. - - Gets information about a backup schedule. - - Returns: - Callable[[~.GetBackupScheduleRequest], - ~.BackupSchedule]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_backup_schedule" not in self._stubs: - self._stubs["get_backup_schedule"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/GetBackupSchedule", - request_serializer=firestore_admin.GetBackupScheduleRequest.serialize, - response_deserializer=schedule.BackupSchedule.deserialize, - ) - return self._stubs["get_backup_schedule"] - - @property - def list_backup_schedules( - self, - ) -> Callable[ - [firestore_admin.ListBackupSchedulesRequest], - firestore_admin.ListBackupSchedulesResponse, - ]: - r"""Return a callable for the list backup schedules method over gRPC. - - List backup schedules. - - Returns: - Callable[[~.ListBackupSchedulesRequest], - ~.ListBackupSchedulesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "list_backup_schedules" not in self._stubs: - self._stubs["list_backup_schedules"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/ListBackupSchedules", - request_serializer=firestore_admin.ListBackupSchedulesRequest.serialize, - response_deserializer=firestore_admin.ListBackupSchedulesResponse.deserialize, - ) - return self._stubs["list_backup_schedules"] - - @property - def update_backup_schedule( - self, - ) -> Callable[ - [firestore_admin.UpdateBackupScheduleRequest], schedule.BackupSchedule - ]: - r"""Return a callable for the update backup schedule method over gRPC. - - Updates a backup schedule. - - Returns: - Callable[[~.UpdateBackupScheduleRequest], - ~.BackupSchedule]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "update_backup_schedule" not in self._stubs: - self._stubs["update_backup_schedule"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/UpdateBackupSchedule", - request_serializer=firestore_admin.UpdateBackupScheduleRequest.serialize, - response_deserializer=schedule.BackupSchedule.deserialize, - ) - return self._stubs["update_backup_schedule"] - - @property - def delete_backup_schedule( - self, - ) -> Callable[[firestore_admin.DeleteBackupScheduleRequest], empty_pb2.Empty]: - r"""Return a callable for the delete backup schedule method over gRPC. - - Deletes a backup schedule. - - Returns: - Callable[[~.DeleteBackupScheduleRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "delete_backup_schedule" not in self._stubs: - self._stubs["delete_backup_schedule"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/DeleteBackupSchedule", - request_serializer=firestore_admin.DeleteBackupScheduleRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs["delete_backup_schedule"] - - @property - def clone_database( - self, - ) -> Callable[[firestore_admin.CloneDatabaseRequest], operations_pb2.Operation]: - r"""Return a callable for the clone database method over gRPC. - - Creates a new database by cloning an existing one. - - The new database must be in the same cloud region or - multi-region location as the existing database. This behaves - similar to - [FirestoreAdmin.CreateDatabase][google.firestore.admin.v1.FirestoreAdmin.CreateDatabase] - except instead of creating a new empty database, a new database - is created with the database type, index configuration, and - documents from an existing database. - - The [long-running operation][google.longrunning.Operation] can - be used to track the progress of the clone, with the Operation's - [metadata][google.longrunning.Operation.metadata] field type - being the - [CloneDatabaseMetadata][google.firestore.admin.v1.CloneDatabaseMetadata]. - The [response][google.longrunning.Operation.response] type is - the [Database][google.firestore.admin.v1.Database] if the clone - was successful. The new database is not readable or writeable - until the LRO has completed. - - Returns: - Callable[[~.CloneDatabaseRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "clone_database" not in self._stubs: - self._stubs["clone_database"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/CloneDatabase", - request_serializer=firestore_admin.CloneDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["clone_database"] - - def close(self): - self._logged_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse - ]: - r"""Return a callable for the list_operations method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ("FirestoreAdminGrpcTransport",) diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py deleted file mode 100644 index 117707853c..0000000000 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py +++ /dev/null @@ -1,1705 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.firestore_admin_v1.types import backup -from google.cloud.firestore_admin_v1.types import database -from google.cloud.firestore_admin_v1.types import field -from google.cloud.firestore_admin_v1.types import firestore_admin -from google.cloud.firestore_admin_v1.types import index -from google.cloud.firestore_admin_v1.types import schedule -from google.cloud.firestore_admin_v1.types import user_creds -from google.cloud.firestore_admin_v1.types import user_creds as gfa_user_creds -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO -from .grpc import FirestoreAdminGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor( - grpc.aio.UnaryUnaryClientInterceptor -): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = 
CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - std_logging.DEBUG - ) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert gRPC metadata `` to list of tuples - metadata = ( - dict([(k, str(v)) for k, v in response_metadata]) - if response_metadata - else None - ) - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response to rpc {client_call_details.method}.", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class 
FirestoreAdminGrpcAsyncIOTransport(FirestoreAdminTransport): - """gRPC AsyncIO backend transport for FirestoreAdmin. - - The Cloud Firestore Admin API. - - This API provides several administrative services for Cloud - Firestore. - - Project, Database, Namespace, Collection, Collection Group, and - Document are used as defined in the Google Cloud Firestore API. - - Operation: An Operation represents work being performed in the - background. - - The index service manages Cloud Firestore indexes. - - Index creation is performed asynchronously. An Operation resource is - created for each such asynchronous operation. The state of the - operation (including any errors encountered) may be queried via the - Operation resource. - - The Operations collection provides a record of actions performed for - the specified Project (including any Operations in progress). - Operations are not created directly but through calls on other - collections or resources. - - An Operation that is done may be deleted so that it is no longer - listed as part of the Operation collection. Operations are garbage - collected after 30 days. By default, ListOperations will only return - in progress and failed operations. To list completed operation, - issue a ListOperations request with the filter ``done: true``. - - Operations are created by service ``FirestoreAdmin``, but are - accessed via service ``google.longrunning.Operations``. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
- """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel( - cls, - host: str = "firestore.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs, - ) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be - removed in the next major version of this library. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. 
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs, - ) - - def __init__( - self, - *, - host: str = "firestore.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'firestore.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - This argument will be removed in the next major version of this library. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. 
- channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. 
- google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - 
credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = ( - "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - ) - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_index( - self, - ) -> Callable[ - [firestore_admin.CreateIndexRequest], Awaitable[operations_pb2.Operation] - ]: - r"""Return a callable for the create index method over gRPC. - - Creates a composite index. This returns a - [google.longrunning.Operation][google.longrunning.Operation] - which may be used to track the status of the creation. 
The - metadata for the operation will be the type - [IndexOperationMetadata][google.firestore.admin.v1.IndexOperationMetadata]. - - Returns: - Callable[[~.CreateIndexRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "create_index" not in self._stubs: - self._stubs["create_index"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/CreateIndex", - request_serializer=firestore_admin.CreateIndexRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["create_index"] - - @property - def list_indexes( - self, - ) -> Callable[ - [firestore_admin.ListIndexesRequest], - Awaitable[firestore_admin.ListIndexesResponse], - ]: - r"""Return a callable for the list indexes method over gRPC. - - Lists composite indexes. - - Returns: - Callable[[~.ListIndexesRequest], - Awaitable[~.ListIndexesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_indexes" not in self._stubs: - self._stubs["list_indexes"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/ListIndexes", - request_serializer=firestore_admin.ListIndexesRequest.serialize, - response_deserializer=firestore_admin.ListIndexesResponse.deserialize, - ) - return self._stubs["list_indexes"] - - @property - def get_index( - self, - ) -> Callable[[firestore_admin.GetIndexRequest], Awaitable[index.Index]]: - r"""Return a callable for the get index method over gRPC. - - Gets a composite index. 
- - Returns: - Callable[[~.GetIndexRequest], - Awaitable[~.Index]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_index" not in self._stubs: - self._stubs["get_index"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/GetIndex", - request_serializer=firestore_admin.GetIndexRequest.serialize, - response_deserializer=index.Index.deserialize, - ) - return self._stubs["get_index"] - - @property - def delete_index( - self, - ) -> Callable[[firestore_admin.DeleteIndexRequest], Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete index method over gRPC. - - Deletes a composite index. - - Returns: - Callable[[~.DeleteIndexRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_index" not in self._stubs: - self._stubs["delete_index"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/DeleteIndex", - request_serializer=firestore_admin.DeleteIndexRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs["delete_index"] - - @property - def get_field( - self, - ) -> Callable[[firestore_admin.GetFieldRequest], Awaitable[field.Field]]: - r"""Return a callable for the get field method over gRPC. - - Gets the metadata and configuration for a Field. - - Returns: - Callable[[~.GetFieldRequest], - Awaitable[~.Field]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_field" not in self._stubs: - self._stubs["get_field"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/GetField", - request_serializer=firestore_admin.GetFieldRequest.serialize, - response_deserializer=field.Field.deserialize, - ) - return self._stubs["get_field"] - - @property - def update_field( - self, - ) -> Callable[ - [firestore_admin.UpdateFieldRequest], Awaitable[operations_pb2.Operation] - ]: - r"""Return a callable for the update field method over gRPC. - - Updates a field configuration. Currently, field updates apply - only to single field index configuration. However, calls to - [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField] - should provide a field mask to avoid changing any configuration - that the caller isn't aware of. The field mask should be - specified as: ``{ paths: "index_config" }``. - - This call returns a - [google.longrunning.Operation][google.longrunning.Operation] - which may be used to track the status of the field update. The - metadata for the operation will be the type - [FieldOperationMetadata][google.firestore.admin.v1.FieldOperationMetadata]. - - To configure the default field settings for the database, use - the special ``Field`` with resource name: - ``projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*``. - - Returns: - Callable[[~.UpdateFieldRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "update_field" not in self._stubs: - self._stubs["update_field"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/UpdateField", - request_serializer=firestore_admin.UpdateFieldRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["update_field"] - - @property - def list_fields( - self, - ) -> Callable[ - [firestore_admin.ListFieldsRequest], - Awaitable[firestore_admin.ListFieldsResponse], - ]: - r"""Return a callable for the list fields method over gRPC. - - Lists the field configuration and metadata for this database. - - Currently, - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] - only supports listing fields that have been explicitly - overridden. To issue this query, call - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] - with the filter set to ``indexConfig.usesAncestorConfig:false`` - or ``ttlConfig:*``. - - Returns: - Callable[[~.ListFieldsRequest], - Awaitable[~.ListFieldsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_fields" not in self._stubs: - self._stubs["list_fields"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/ListFields", - request_serializer=firestore_admin.ListFieldsRequest.serialize, - response_deserializer=firestore_admin.ListFieldsResponse.deserialize, - ) - return self._stubs["list_fields"] - - @property - def export_documents( - self, - ) -> Callable[ - [firestore_admin.ExportDocumentsRequest], Awaitable[operations_pb2.Operation] - ]: - r"""Return a callable for the export documents method over gRPC. 
- - Exports a copy of all or a subset of documents from - Google Cloud Firestore to another storage system, such - as Google Cloud Storage. Recent updates to documents may - not be reflected in the export. The export occurs in the - background and its progress can be monitored and managed - via the Operation resource that is created. The output - of an export may only be used once the associated - operation is done. If an export operation is cancelled - before completion it may leave partial data behind in - Google Cloud Storage. - - For more details on export behavior and output format, - refer to: - - https://cloud.google.com/firestore/docs/manage-data/export-import - - Returns: - Callable[[~.ExportDocumentsRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "export_documents" not in self._stubs: - self._stubs["export_documents"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/ExportDocuments", - request_serializer=firestore_admin.ExportDocumentsRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["export_documents"] - - @property - def import_documents( - self, - ) -> Callable[ - [firestore_admin.ImportDocumentsRequest], Awaitable[operations_pb2.Operation] - ]: - r"""Return a callable for the import documents method over gRPC. - - Imports documents into Google Cloud Firestore. - Existing documents with the same name are overwritten. - The import occurs in the background and its progress can - be monitored and managed via the Operation resource that - is created. If an ImportDocuments operation is - cancelled, it is possible that a subset of the data has - already been imported to Cloud Firestore. 
- - Returns: - Callable[[~.ImportDocumentsRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "import_documents" not in self._stubs: - self._stubs["import_documents"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/ImportDocuments", - request_serializer=firestore_admin.ImportDocumentsRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["import_documents"] - - @property - def bulk_delete_documents( - self, - ) -> Callable[ - [firestore_admin.BulkDeleteDocumentsRequest], - Awaitable[operations_pb2.Operation], - ]: - r"""Return a callable for the bulk delete documents method over gRPC. - - Bulk deletes a subset of documents from Google Cloud - Firestore. Documents created or updated after the - underlying system starts to process the request will not - be deleted. The bulk delete occurs in the background and - its progress can be monitored and managed via the - Operation resource that is created. - - For more details on bulk delete behavior, refer to: - - https://cloud.google.com/firestore/docs/manage-data/bulk-delete - - Returns: - Callable[[~.BulkDeleteDocumentsRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "bulk_delete_documents" not in self._stubs: - self._stubs["bulk_delete_documents"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/BulkDeleteDocuments", - request_serializer=firestore_admin.BulkDeleteDocumentsRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["bulk_delete_documents"] - - @property - def create_database( - self, - ) -> Callable[ - [firestore_admin.CreateDatabaseRequest], Awaitable[operations_pb2.Operation] - ]: - r"""Return a callable for the create database method over gRPC. - - Create a database. - - Returns: - Callable[[~.CreateDatabaseRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "create_database" not in self._stubs: - self._stubs["create_database"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/CreateDatabase", - request_serializer=firestore_admin.CreateDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["create_database"] - - @property - def get_database( - self, - ) -> Callable[[firestore_admin.GetDatabaseRequest], Awaitable[database.Database]]: - r"""Return a callable for the get database method over gRPC. - - Gets information about a database. - - Returns: - Callable[[~.GetDatabaseRequest], - Awaitable[~.Database]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_database" not in self._stubs: - self._stubs["get_database"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/GetDatabase", - request_serializer=firestore_admin.GetDatabaseRequest.serialize, - response_deserializer=database.Database.deserialize, - ) - return self._stubs["get_database"] - - @property - def list_databases( - self, - ) -> Callable[ - [firestore_admin.ListDatabasesRequest], - Awaitable[firestore_admin.ListDatabasesResponse], - ]: - r"""Return a callable for the list databases method over gRPC. - - List all the databases in the project. - - Returns: - Callable[[~.ListDatabasesRequest], - Awaitable[~.ListDatabasesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_databases" not in self._stubs: - self._stubs["list_databases"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/ListDatabases", - request_serializer=firestore_admin.ListDatabasesRequest.serialize, - response_deserializer=firestore_admin.ListDatabasesResponse.deserialize, - ) - return self._stubs["list_databases"] - - @property - def update_database( - self, - ) -> Callable[ - [firestore_admin.UpdateDatabaseRequest], Awaitable[operations_pb2.Operation] - ]: - r"""Return a callable for the update database method over gRPC. - - Updates a database. - - Returns: - Callable[[~.UpdateDatabaseRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "update_database" not in self._stubs: - self._stubs["update_database"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/UpdateDatabase", - request_serializer=firestore_admin.UpdateDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["update_database"] - - @property - def delete_database( - self, - ) -> Callable[ - [firestore_admin.DeleteDatabaseRequest], Awaitable[operations_pb2.Operation] - ]: - r"""Return a callable for the delete database method over gRPC. - - Deletes a database. - - Returns: - Callable[[~.DeleteDatabaseRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_database" not in self._stubs: - self._stubs["delete_database"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/DeleteDatabase", - request_serializer=firestore_admin.DeleteDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["delete_database"] - - @property - def create_user_creds( - self, - ) -> Callable[ - [firestore_admin.CreateUserCredsRequest], Awaitable[gfa_user_creds.UserCreds] - ]: - r"""Return a callable for the create user creds method over gRPC. - - Create a user creds. - - Returns: - Callable[[~.CreateUserCredsRequest], - Awaitable[~.UserCreds]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "create_user_creds" not in self._stubs: - self._stubs["create_user_creds"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/CreateUserCreds", - request_serializer=firestore_admin.CreateUserCredsRequest.serialize, - response_deserializer=gfa_user_creds.UserCreds.deserialize, - ) - return self._stubs["create_user_creds"] - - @property - def get_user_creds( - self, - ) -> Callable[ - [firestore_admin.GetUserCredsRequest], Awaitable[user_creds.UserCreds] - ]: - r"""Return a callable for the get user creds method over gRPC. - - Gets a user creds resource. Note that the returned - resource does not contain the secret value itself. - - Returns: - Callable[[~.GetUserCredsRequest], - Awaitable[~.UserCreds]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_user_creds" not in self._stubs: - self._stubs["get_user_creds"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/GetUserCreds", - request_serializer=firestore_admin.GetUserCredsRequest.serialize, - response_deserializer=user_creds.UserCreds.deserialize, - ) - return self._stubs["get_user_creds"] - - @property - def list_user_creds( - self, - ) -> Callable[ - [firestore_admin.ListUserCredsRequest], - Awaitable[firestore_admin.ListUserCredsResponse], - ]: - r"""Return a callable for the list user creds method over gRPC. - - List all user creds in the database. Note that the - returned resource does not contain the secret value - itself. - - Returns: - Callable[[~.ListUserCredsRequest], - Awaitable[~.ListUserCredsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_user_creds" not in self._stubs: - self._stubs["list_user_creds"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/ListUserCreds", - request_serializer=firestore_admin.ListUserCredsRequest.serialize, - response_deserializer=firestore_admin.ListUserCredsResponse.deserialize, - ) - return self._stubs["list_user_creds"] - - @property - def enable_user_creds( - self, - ) -> Callable[ - [firestore_admin.EnableUserCredsRequest], Awaitable[user_creds.UserCreds] - ]: - r"""Return a callable for the enable user creds method over gRPC. - - Enables a user creds. No-op if the user creds are - already enabled. - - Returns: - Callable[[~.EnableUserCredsRequest], - Awaitable[~.UserCreds]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "enable_user_creds" not in self._stubs: - self._stubs["enable_user_creds"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/EnableUserCreds", - request_serializer=firestore_admin.EnableUserCredsRequest.serialize, - response_deserializer=user_creds.UserCreds.deserialize, - ) - return self._stubs["enable_user_creds"] - - @property - def disable_user_creds( - self, - ) -> Callable[ - [firestore_admin.DisableUserCredsRequest], Awaitable[user_creds.UserCreds] - ]: - r"""Return a callable for the disable user creds method over gRPC. - - Disables a user creds. No-op if the user creds are - already disabled. - - Returns: - Callable[[~.DisableUserCredsRequest], - Awaitable[~.UserCreds]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "disable_user_creds" not in self._stubs: - self._stubs["disable_user_creds"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/DisableUserCreds", - request_serializer=firestore_admin.DisableUserCredsRequest.serialize, - response_deserializer=user_creds.UserCreds.deserialize, - ) - return self._stubs["disable_user_creds"] - - @property - def reset_user_password( - self, - ) -> Callable[ - [firestore_admin.ResetUserPasswordRequest], Awaitable[user_creds.UserCreds] - ]: - r"""Return a callable for the reset user password method over gRPC. - - Resets the password of a user creds. - - Returns: - Callable[[~.ResetUserPasswordRequest], - Awaitable[~.UserCreds]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "reset_user_password" not in self._stubs: - self._stubs["reset_user_password"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/ResetUserPassword", - request_serializer=firestore_admin.ResetUserPasswordRequest.serialize, - response_deserializer=user_creds.UserCreds.deserialize, - ) - return self._stubs["reset_user_password"] - - @property - def delete_user_creds( - self, - ) -> Callable[[firestore_admin.DeleteUserCredsRequest], Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete user creds method over gRPC. - - Deletes a user creds. - - Returns: - Callable[[~.DeleteUserCredsRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_user_creds" not in self._stubs: - self._stubs["delete_user_creds"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/DeleteUserCreds", - request_serializer=firestore_admin.DeleteUserCredsRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs["delete_user_creds"] - - @property - def get_backup( - self, - ) -> Callable[[firestore_admin.GetBackupRequest], Awaitable[backup.Backup]]: - r"""Return a callable for the get backup method over gRPC. - - Gets information about a backup. - - Returns: - Callable[[~.GetBackupRequest], - Awaitable[~.Backup]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_backup" not in self._stubs: - self._stubs["get_backup"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/GetBackup", - request_serializer=firestore_admin.GetBackupRequest.serialize, - response_deserializer=backup.Backup.deserialize, - ) - return self._stubs["get_backup"] - - @property - def list_backups( - self, - ) -> Callable[ - [firestore_admin.ListBackupsRequest], - Awaitable[firestore_admin.ListBackupsResponse], - ]: - r"""Return a callable for the list backups method over gRPC. - - Lists all the backups. - - Returns: - Callable[[~.ListBackupsRequest], - Awaitable[~.ListBackupsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_backups" not in self._stubs: - self._stubs["list_backups"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/ListBackups", - request_serializer=firestore_admin.ListBackupsRequest.serialize, - response_deserializer=firestore_admin.ListBackupsResponse.deserialize, - ) - return self._stubs["list_backups"] - - @property - def delete_backup( - self, - ) -> Callable[[firestore_admin.DeleteBackupRequest], Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete backup method over gRPC. - - Deletes a backup. - - Returns: - Callable[[~.DeleteBackupRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_backup" not in self._stubs: - self._stubs["delete_backup"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/DeleteBackup", - request_serializer=firestore_admin.DeleteBackupRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs["delete_backup"] - - @property - def restore_database( - self, - ) -> Callable[ - [firestore_admin.RestoreDatabaseRequest], Awaitable[operations_pb2.Operation] - ]: - r"""Return a callable for the restore database method over gRPC. - - Creates a new database by restoring from an existing backup. - - The new database must be in the same cloud region or - multi-region location as the existing backup. 
This behaves - similar to - [FirestoreAdmin.CreateDatabase][google.firestore.admin.v1.FirestoreAdmin.CreateDatabase] - except instead of creating a new empty database, a new database - is created with the database type, index configuration, and - documents from an existing backup. - - The [long-running operation][google.longrunning.Operation] can - be used to track the progress of the restore, with the - Operation's [metadata][google.longrunning.Operation.metadata] - field type being the - [RestoreDatabaseMetadata][google.firestore.admin.v1.RestoreDatabaseMetadata]. - The [response][google.longrunning.Operation.response] type is - the [Database][google.firestore.admin.v1.Database] if the - restore was successful. The new database is not readable or - writeable until the LRO has completed. - - Returns: - Callable[[~.RestoreDatabaseRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "restore_database" not in self._stubs: - self._stubs["restore_database"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/RestoreDatabase", - request_serializer=firestore_admin.RestoreDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["restore_database"] - - @property - def create_backup_schedule( - self, - ) -> Callable[ - [firestore_admin.CreateBackupScheduleRequest], - Awaitable[schedule.BackupSchedule], - ]: - r"""Return a callable for the create backup schedule method over gRPC. - - Creates a backup schedule on a database. - At most two backup schedules can be configured on a - database, one daily backup schedule and one weekly - backup schedule. 
- - Returns: - Callable[[~.CreateBackupScheduleRequest], - Awaitable[~.BackupSchedule]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "create_backup_schedule" not in self._stubs: - self._stubs["create_backup_schedule"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/CreateBackupSchedule", - request_serializer=firestore_admin.CreateBackupScheduleRequest.serialize, - response_deserializer=schedule.BackupSchedule.deserialize, - ) - return self._stubs["create_backup_schedule"] - - @property - def get_backup_schedule( - self, - ) -> Callable[ - [firestore_admin.GetBackupScheduleRequest], Awaitable[schedule.BackupSchedule] - ]: - r"""Return a callable for the get backup schedule method over gRPC. - - Gets information about a backup schedule. - - Returns: - Callable[[~.GetBackupScheduleRequest], - Awaitable[~.BackupSchedule]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_backup_schedule" not in self._stubs: - self._stubs["get_backup_schedule"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/GetBackupSchedule", - request_serializer=firestore_admin.GetBackupScheduleRequest.serialize, - response_deserializer=schedule.BackupSchedule.deserialize, - ) - return self._stubs["get_backup_schedule"] - - @property - def list_backup_schedules( - self, - ) -> Callable[ - [firestore_admin.ListBackupSchedulesRequest], - Awaitable[firestore_admin.ListBackupSchedulesResponse], - ]: - r"""Return a callable for the list backup schedules method over gRPC. 
- - List backup schedules. - - Returns: - Callable[[~.ListBackupSchedulesRequest], - Awaitable[~.ListBackupSchedulesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_backup_schedules" not in self._stubs: - self._stubs["list_backup_schedules"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/ListBackupSchedules", - request_serializer=firestore_admin.ListBackupSchedulesRequest.serialize, - response_deserializer=firestore_admin.ListBackupSchedulesResponse.deserialize, - ) - return self._stubs["list_backup_schedules"] - - @property - def update_backup_schedule( - self, - ) -> Callable[ - [firestore_admin.UpdateBackupScheduleRequest], - Awaitable[schedule.BackupSchedule], - ]: - r"""Return a callable for the update backup schedule method over gRPC. - - Updates a backup schedule. - - Returns: - Callable[[~.UpdateBackupScheduleRequest], - Awaitable[~.BackupSchedule]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "update_backup_schedule" not in self._stubs: - self._stubs["update_backup_schedule"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/UpdateBackupSchedule", - request_serializer=firestore_admin.UpdateBackupScheduleRequest.serialize, - response_deserializer=schedule.BackupSchedule.deserialize, - ) - return self._stubs["update_backup_schedule"] - - @property - def delete_backup_schedule( - self, - ) -> Callable[ - [firestore_admin.DeleteBackupScheduleRequest], Awaitable[empty_pb2.Empty] - ]: - r"""Return a callable for the delete backup schedule method over gRPC. - - Deletes a backup schedule. - - Returns: - Callable[[~.DeleteBackupScheduleRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_backup_schedule" not in self._stubs: - self._stubs["delete_backup_schedule"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/DeleteBackupSchedule", - request_serializer=firestore_admin.DeleteBackupScheduleRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs["delete_backup_schedule"] - - @property - def clone_database( - self, - ) -> Callable[ - [firestore_admin.CloneDatabaseRequest], Awaitable[operations_pb2.Operation] - ]: - r"""Return a callable for the clone database method over gRPC. - - Creates a new database by cloning an existing one. - - The new database must be in the same cloud region or - multi-region location as the existing database. 
This behaves - similar to - [FirestoreAdmin.CreateDatabase][google.firestore.admin.v1.FirestoreAdmin.CreateDatabase] - except instead of creating a new empty database, a new database - is created with the database type, index configuration, and - documents from an existing database. - - The [long-running operation][google.longrunning.Operation] can - be used to track the progress of the clone, with the Operation's - [metadata][google.longrunning.Operation.metadata] field type - being the - [CloneDatabaseMetadata][google.firestore.admin.v1.CloneDatabaseMetadata]. - The [response][google.longrunning.Operation.response] type is - the [Database][google.firestore.admin.v1.Database] if the clone - was successful. The new database is not readable or writeable - until the LRO has completed. - - Returns: - Callable[[~.CloneDatabaseRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "clone_database" not in self._stubs: - self._stubs["clone_database"] = self._logged_channel.unary_unary( - "/google.firestore.admin.v1.FirestoreAdmin/CloneDatabase", - request_serializer=firestore_admin.CloneDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["clone_database"] - - def _prep_wrapped_messages(self, client_info): - """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_index: self._wrap_method( - self.create_index, - default_timeout=60.0, - client_info=client_info, - ), - self.list_indexes: self._wrap_method( - self.list_indexes, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_index: self._wrap_method( - self.get_index, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.delete_index: self._wrap_method( - self.delete_index, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_field: self._wrap_method( - self.get_field, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - 
core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.update_field: self._wrap_method( - self.update_field, - default_timeout=60.0, - client_info=client_info, - ), - self.list_fields: self._wrap_method( - self.list_fields, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.export_documents: self._wrap_method( - self.export_documents, - default_timeout=60.0, - client_info=client_info, - ), - self.import_documents: self._wrap_method( - self.import_documents, - default_timeout=60.0, - client_info=client_info, - ), - self.bulk_delete_documents: self._wrap_method( - self.bulk_delete_documents, - default_timeout=60.0, - client_info=client_info, - ), - self.create_database: self._wrap_method( - self.create_database, - default_timeout=120.0, - client_info=client_info, - ), - self.get_database: self._wrap_method( - self.get_database, - default_timeout=None, - client_info=client_info, - ), - self.list_databases: self._wrap_method( - self.list_databases, - default_timeout=None, - client_info=client_info, - ), - self.update_database: self._wrap_method( - self.update_database, - default_timeout=None, - client_info=client_info, - ), - self.delete_database: self._wrap_method( - self.delete_database, - default_timeout=None, - client_info=client_info, - ), - self.create_user_creds: self._wrap_method( - self.create_user_creds, - default_timeout=None, - client_info=client_info, - ), - self.get_user_creds: self._wrap_method( - self.get_user_creds, - default_timeout=None, - client_info=client_info, - ), - self.list_user_creds: self._wrap_method( - self.list_user_creds, - default_timeout=None, - 
client_info=client_info, - ), - self.enable_user_creds: self._wrap_method( - self.enable_user_creds, - default_timeout=None, - client_info=client_info, - ), - self.disable_user_creds: self._wrap_method( - self.disable_user_creds, - default_timeout=None, - client_info=client_info, - ), - self.reset_user_password: self._wrap_method( - self.reset_user_password, - default_timeout=None, - client_info=client_info, - ), - self.delete_user_creds: self._wrap_method( - self.delete_user_creds, - default_timeout=None, - client_info=client_info, - ), - self.get_backup: self._wrap_method( - self.get_backup, - default_timeout=None, - client_info=client_info, - ), - self.list_backups: self._wrap_method( - self.list_backups, - default_timeout=None, - client_info=client_info, - ), - self.delete_backup: self._wrap_method( - self.delete_backup, - default_timeout=None, - client_info=client_info, - ), - self.restore_database: self._wrap_method( - self.restore_database, - default_timeout=120.0, - client_info=client_info, - ), - self.create_backup_schedule: self._wrap_method( - self.create_backup_schedule, - default_timeout=None, - client_info=client_info, - ), - self.get_backup_schedule: self._wrap_method( - self.get_backup_schedule, - default_timeout=None, - client_info=client_info, - ), - self.list_backup_schedules: self._wrap_method( - self.list_backup_schedules, - default_timeout=None, - client_info=client_info, - ), - self.update_backup_schedule: self._wrap_method( - self.update_backup_schedule, - default_timeout=None, - client_info=client_info, - ), - self.delete_backup_schedule: self._wrap_method( - self.delete_backup_schedule, - default_timeout=None, - client_info=client_info, - ), - self.clone_database: self._wrap_method( - self.clone_database, - default_timeout=120.0, - client_info=client_info, - ), - self.cancel_operation: self._wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: self._wrap_method( - 
self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: self._wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: self._wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse - ]: - r"""Return a callable for the list_operations method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - -__all__ = ("FirestoreAdminGrpcAsyncIOTransport",) diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest.py deleted file mode 100644 index 41e819c875..0000000000 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest.py +++ /dev/null @@ -1,7500 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 -import google.protobuf - -from google.protobuf import json_format -from google.api_core import operations_v1 -from google.cloud.location import locations_pb2 # type: ignore - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.firestore_admin_v1.types import backup -from google.cloud.firestore_admin_v1.types import database -from google.cloud.firestore_admin_v1.types import field -from google.cloud.firestore_admin_v1.types import firestore_admin -from google.cloud.firestore_admin_v1.types import index -from google.cloud.firestore_admin_v1.types import schedule -from google.cloud.firestore_admin_v1.types import user_creds -from google.cloud.firestore_admin_v1.types import user_creds as gfa_user_creds -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - - -from .rest_base import _BaseFirestoreAdminRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = 
gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class FirestoreAdminRestInterceptor: - """Interceptor for FirestoreAdmin. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the FirestoreAdminRestTransport. - - .. code-block:: python - class MyCustomFirestoreAdminInterceptor(FirestoreAdminRestInterceptor): - def pre_bulk_delete_documents(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_bulk_delete_documents(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_clone_database(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_clone_database(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_backup_schedule(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_backup_schedule(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_database(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_database(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_index(self, request, metadata): - logging.log(f"Received request: {request}") - return request, 
metadata - - def post_create_index(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_user_creds(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_user_creds(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_backup(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_backup_schedule(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_database(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_database(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_index(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_user_creds(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_disable_user_creds(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_disable_user_creds(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_enable_user_creds(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_enable_user_creds(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_export_documents(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_export_documents(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_backup(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_backup(self, 
response): - logging.log(f"Received response: {response}") - return response - - def pre_get_backup_schedule(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_backup_schedule(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_database(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_database(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_field(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_field(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_index(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_index(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_user_creds(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_user_creds(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_import_documents(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_import_documents(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_backups(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_backups(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_backup_schedules(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_backup_schedules(self, response): - logging.log(f"Received response: {response}") - return response - - def 
pre_list_databases(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_databases(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_fields(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_fields(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_indexes(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_indexes(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_user_creds(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_user_creds(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_reset_user_password(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_reset_user_password(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_restore_database(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_restore_database(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_backup_schedule(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_backup_schedule(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_database(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_database(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_field(self, request, metadata): - logging.log(f"Received request: 
{request}") - return request, metadata - - def post_update_field(self, response): - logging.log(f"Received response: {response}") - return response - - transport = FirestoreAdminRestTransport(interceptor=MyCustomFirestoreAdminInterceptor()) - client = FirestoreAdminClient(transport=transport) - - - """ - - def pre_bulk_delete_documents( - self, - request: firestore_admin.BulkDeleteDocumentsRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.BulkDeleteDocumentsRequest, - Sequence[Tuple[str, Union[str, bytes]]], - ]: - """Pre-rpc interceptor for bulk_delete_documents - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_bulk_delete_documents( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for bulk_delete_documents - - DEPRECATED. Please use the `post_bulk_delete_documents_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. This `post_bulk_delete_documents` interceptor runs - before the `post_bulk_delete_documents_with_metadata` interceptor. - """ - return response - - def post_bulk_delete_documents_with_metadata( - self, - response: operations_pb2.Operation, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for bulk_delete_documents - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the FirestoreAdmin server but before it is returned to user code. - - We recommend only using this `post_bulk_delete_documents_with_metadata` - interceptor in new development instead of the `post_bulk_delete_documents` interceptor. 
- When both interceptors are used, this `post_bulk_delete_documents_with_metadata` interceptor runs after the - `post_bulk_delete_documents` interceptor. The (possibly modified) response returned by - `post_bulk_delete_documents` will be passed to - `post_bulk_delete_documents_with_metadata`. - """ - return response, metadata - - def pre_clone_database( - self, - request: firestore_admin.CloneDatabaseRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.CloneDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for clone_database - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_clone_database( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for clone_database - - DEPRECATED. Please use the `post_clone_database_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. This `post_clone_database` interceptor runs - before the `post_clone_database_with_metadata` interceptor. - """ - return response - - def post_clone_database_with_metadata( - self, - response: operations_pb2.Operation, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for clone_database - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the FirestoreAdmin server but before it is returned to user code. - - We recommend only using this `post_clone_database_with_metadata` - interceptor in new development instead of the `post_clone_database` interceptor. 
- When both interceptors are used, this `post_clone_database_with_metadata` interceptor runs after the - `post_clone_database` interceptor. The (possibly modified) response returned by - `post_clone_database` will be passed to - `post_clone_database_with_metadata`. - """ - return response, metadata - - def pre_create_backup_schedule( - self, - request: firestore_admin.CreateBackupScheduleRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.CreateBackupScheduleRequest, - Sequence[Tuple[str, Union[str, bytes]]], - ]: - """Pre-rpc interceptor for create_backup_schedule - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_create_backup_schedule( - self, response: schedule.BackupSchedule - ) -> schedule.BackupSchedule: - """Post-rpc interceptor for create_backup_schedule - - DEPRECATED. Please use the `post_create_backup_schedule_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. This `post_create_backup_schedule` interceptor runs - before the `post_create_backup_schedule_with_metadata` interceptor. - """ - return response - - def post_create_backup_schedule_with_metadata( - self, - response: schedule.BackupSchedule, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[schedule.BackupSchedule, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_backup_schedule - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the FirestoreAdmin server but before it is returned to user code. - - We recommend only using this `post_create_backup_schedule_with_metadata` - interceptor in new development instead of the `post_create_backup_schedule` interceptor. 
- When both interceptors are used, this `post_create_backup_schedule_with_metadata` interceptor runs after the - `post_create_backup_schedule` interceptor. The (possibly modified) response returned by - `post_create_backup_schedule` will be passed to - `post_create_backup_schedule_with_metadata`. - """ - return response, metadata - - def pre_create_database( - self, - request: firestore_admin.CreateDatabaseRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.CreateDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for create_database - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_create_database( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for create_database - - DEPRECATED. Please use the `post_create_database_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. This `post_create_database` interceptor runs - before the `post_create_database_with_metadata` interceptor. - """ - return response - - def post_create_database_with_metadata( - self, - response: operations_pb2.Operation, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_database - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the FirestoreAdmin server but before it is returned to user code. - - We recommend only using this `post_create_database_with_metadata` - interceptor in new development instead of the `post_create_database` interceptor. 
- When both interceptors are used, this `post_create_database_with_metadata` interceptor runs after the - `post_create_database` interceptor. The (possibly modified) response returned by - `post_create_database` will be passed to - `post_create_database_with_metadata`. - """ - return response, metadata - - def pre_create_index( - self, - request: firestore_admin.CreateIndexRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.CreateIndexRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for create_index - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_create_index( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for create_index - - DEPRECATED. Please use the `post_create_index_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. This `post_create_index` interceptor runs - before the `post_create_index_with_metadata` interceptor. - """ - return response - - def post_create_index_with_metadata( - self, - response: operations_pb2.Operation, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_index - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the FirestoreAdmin server but before it is returned to user code. - - We recommend only using this `post_create_index_with_metadata` - interceptor in new development instead of the `post_create_index` interceptor. - When both interceptors are used, this `post_create_index_with_metadata` interceptor runs after the - `post_create_index` interceptor. 
The (possibly modified) response returned by - `post_create_index` will be passed to - `post_create_index_with_metadata`. - """ - return response, metadata - - def pre_create_user_creds( - self, - request: firestore_admin.CreateUserCredsRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.CreateUserCredsRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for create_user_creds - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_create_user_creds( - self, response: gfa_user_creds.UserCreds - ) -> gfa_user_creds.UserCreds: - """Post-rpc interceptor for create_user_creds - - DEPRECATED. Please use the `post_create_user_creds_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. This `post_create_user_creds` interceptor runs - before the `post_create_user_creds_with_metadata` interceptor. - """ - return response - - def post_create_user_creds_with_metadata( - self, - response: gfa_user_creds.UserCreds, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[gfa_user_creds.UserCreds, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_user_creds - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the FirestoreAdmin server but before it is returned to user code. - - We recommend only using this `post_create_user_creds_with_metadata` - interceptor in new development instead of the `post_create_user_creds` interceptor. - When both interceptors are used, this `post_create_user_creds_with_metadata` interceptor runs after the - `post_create_user_creds` interceptor. 
The (possibly modified) response returned by - `post_create_user_creds` will be passed to - `post_create_user_creds_with_metadata`. - """ - return response, metadata - - def pre_delete_backup( - self, - request: firestore_admin.DeleteBackupRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.DeleteBackupRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for delete_backup - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def pre_delete_backup_schedule( - self, - request: firestore_admin.DeleteBackupScheduleRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.DeleteBackupScheduleRequest, - Sequence[Tuple[str, Union[str, bytes]]], - ]: - """Pre-rpc interceptor for delete_backup_schedule - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def pre_delete_database( - self, - request: firestore_admin.DeleteDatabaseRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.DeleteDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for delete_database - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_delete_database( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_database - - DEPRECATED. Please use the `post_delete_database_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. 
This `post_delete_database` interceptor runs - before the `post_delete_database_with_metadata` interceptor. - """ - return response - - def post_delete_database_with_metadata( - self, - response: operations_pb2.Operation, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_database - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the FirestoreAdmin server but before it is returned to user code. - - We recommend only using this `post_delete_database_with_metadata` - interceptor in new development instead of the `post_delete_database` interceptor. - When both interceptors are used, this `post_delete_database_with_metadata` interceptor runs after the - `post_delete_database` interceptor. The (possibly modified) response returned by - `post_delete_database` will be passed to - `post_delete_database_with_metadata`. - """ - return response, metadata - - def pre_delete_index( - self, - request: firestore_admin.DeleteIndexRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.DeleteIndexRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for delete_index - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def pre_delete_user_creds( - self, - request: firestore_admin.DeleteUserCredsRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.DeleteUserCredsRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for delete_user_creds - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. 
- """ - return request, metadata - - def pre_disable_user_creds( - self, - request: firestore_admin.DisableUserCredsRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.DisableUserCredsRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for disable_user_creds - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_disable_user_creds( - self, response: user_creds.UserCreds - ) -> user_creds.UserCreds: - """Post-rpc interceptor for disable_user_creds - - DEPRECATED. Please use the `post_disable_user_creds_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. This `post_disable_user_creds` interceptor runs - before the `post_disable_user_creds_with_metadata` interceptor. - """ - return response - - def post_disable_user_creds_with_metadata( - self, - response: user_creds.UserCreds, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[user_creds.UserCreds, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for disable_user_creds - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the FirestoreAdmin server but before it is returned to user code. - - We recommend only using this `post_disable_user_creds_with_metadata` - interceptor in new development instead of the `post_disable_user_creds` interceptor. - When both interceptors are used, this `post_disable_user_creds_with_metadata` interceptor runs after the - `post_disable_user_creds` interceptor. The (possibly modified) response returned by - `post_disable_user_creds` will be passed to - `post_disable_user_creds_with_metadata`. 
- """ - return response, metadata - - def pre_enable_user_creds( - self, - request: firestore_admin.EnableUserCredsRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.EnableUserCredsRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for enable_user_creds - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_enable_user_creds( - self, response: user_creds.UserCreds - ) -> user_creds.UserCreds: - """Post-rpc interceptor for enable_user_creds - - DEPRECATED. Please use the `post_enable_user_creds_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. This `post_enable_user_creds` interceptor runs - before the `post_enable_user_creds_with_metadata` interceptor. - """ - return response - - def post_enable_user_creds_with_metadata( - self, - response: user_creds.UserCreds, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[user_creds.UserCreds, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for enable_user_creds - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the FirestoreAdmin server but before it is returned to user code. - - We recommend only using this `post_enable_user_creds_with_metadata` - interceptor in new development instead of the `post_enable_user_creds` interceptor. - When both interceptors are used, this `post_enable_user_creds_with_metadata` interceptor runs after the - `post_enable_user_creds` interceptor. The (possibly modified) response returned by - `post_enable_user_creds` will be passed to - `post_enable_user_creds_with_metadata`. 
- """ - return response, metadata - - def pre_export_documents( - self, - request: firestore_admin.ExportDocumentsRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.ExportDocumentsRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for export_documents - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_export_documents( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for export_documents - - DEPRECATED. Please use the `post_export_documents_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. This `post_export_documents` interceptor runs - before the `post_export_documents_with_metadata` interceptor. - """ - return response - - def post_export_documents_with_metadata( - self, - response: operations_pb2.Operation, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for export_documents - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the FirestoreAdmin server but before it is returned to user code. - - We recommend only using this `post_export_documents_with_metadata` - interceptor in new development instead of the `post_export_documents` interceptor. - When both interceptors are used, this `post_export_documents_with_metadata` interceptor runs after the - `post_export_documents` interceptor. The (possibly modified) response returned by - `post_export_documents` will be passed to - `post_export_documents_with_metadata`. 
- """ - return response, metadata - - def pre_get_backup( - self, - request: firestore_admin.GetBackupRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.GetBackupRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for get_backup - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_get_backup(self, response: backup.Backup) -> backup.Backup: - """Post-rpc interceptor for get_backup - - DEPRECATED. Please use the `post_get_backup_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. This `post_get_backup` interceptor runs - before the `post_get_backup_with_metadata` interceptor. - """ - return response - - def post_get_backup_with_metadata( - self, response: backup.Backup, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[backup.Backup, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_backup - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the FirestoreAdmin server but before it is returned to user code. - - We recommend only using this `post_get_backup_with_metadata` - interceptor in new development instead of the `post_get_backup` interceptor. - When both interceptors are used, this `post_get_backup_with_metadata` interceptor runs after the - `post_get_backup` interceptor. The (possibly modified) response returned by - `post_get_backup` will be passed to - `post_get_backup_with_metadata`. 
- """ - return response, metadata - - def pre_get_backup_schedule( - self, - request: firestore_admin.GetBackupScheduleRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.GetBackupScheduleRequest, - Sequence[Tuple[str, Union[str, bytes]]], - ]: - """Pre-rpc interceptor for get_backup_schedule - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_get_backup_schedule( - self, response: schedule.BackupSchedule - ) -> schedule.BackupSchedule: - """Post-rpc interceptor for get_backup_schedule - - DEPRECATED. Please use the `post_get_backup_schedule_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. This `post_get_backup_schedule` interceptor runs - before the `post_get_backup_schedule_with_metadata` interceptor. - """ - return response - - def post_get_backup_schedule_with_metadata( - self, - response: schedule.BackupSchedule, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[schedule.BackupSchedule, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_backup_schedule - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the FirestoreAdmin server but before it is returned to user code. - - We recommend only using this `post_get_backup_schedule_with_metadata` - interceptor in new development instead of the `post_get_backup_schedule` interceptor. - When both interceptors are used, this `post_get_backup_schedule_with_metadata` interceptor runs after the - `post_get_backup_schedule` interceptor. The (possibly modified) response returned by - `post_get_backup_schedule` will be passed to - `post_get_backup_schedule_with_metadata`. 
- """ - return response, metadata - - def pre_get_database( - self, - request: firestore_admin.GetDatabaseRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.GetDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for get_database - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_get_database(self, response: database.Database) -> database.Database: - """Post-rpc interceptor for get_database - - DEPRECATED. Please use the `post_get_database_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. This `post_get_database` interceptor runs - before the `post_get_database_with_metadata` interceptor. - """ - return response - - def post_get_database_with_metadata( - self, - response: database.Database, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[database.Database, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_database - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the FirestoreAdmin server but before it is returned to user code. - - We recommend only using this `post_get_database_with_metadata` - interceptor in new development instead of the `post_get_database` interceptor. - When both interceptors are used, this `post_get_database_with_metadata` interceptor runs after the - `post_get_database` interceptor. The (possibly modified) response returned by - `post_get_database` will be passed to - `post_get_database_with_metadata`. 
- """ - return response, metadata - - def pre_get_field( - self, - request: firestore_admin.GetFieldRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.GetFieldRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for get_field - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_get_field(self, response: field.Field) -> field.Field: - """Post-rpc interceptor for get_field - - DEPRECATED. Please use the `post_get_field_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. This `post_get_field` interceptor runs - before the `post_get_field_with_metadata` interceptor. - """ - return response - - def post_get_field_with_metadata( - self, response: field.Field, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[field.Field, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_field - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the FirestoreAdmin server but before it is returned to user code. - - We recommend only using this `post_get_field_with_metadata` - interceptor in new development instead of the `post_get_field` interceptor. - When both interceptors are used, this `post_get_field_with_metadata` interceptor runs after the - `post_get_field` interceptor. The (possibly modified) response returned by - `post_get_field` will be passed to - `post_get_field_with_metadata`. 
- """ - return response, metadata - - def pre_get_index( - self, - request: firestore_admin.GetIndexRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.GetIndexRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for get_index - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_get_index(self, response: index.Index) -> index.Index: - """Post-rpc interceptor for get_index - - DEPRECATED. Please use the `post_get_index_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. This `post_get_index` interceptor runs - before the `post_get_index_with_metadata` interceptor. - """ - return response - - def post_get_index_with_metadata( - self, response: index.Index, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[index.Index, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_index - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the FirestoreAdmin server but before it is returned to user code. - - We recommend only using this `post_get_index_with_metadata` - interceptor in new development instead of the `post_get_index` interceptor. - When both interceptors are used, this `post_get_index_with_metadata` interceptor runs after the - `post_get_index` interceptor. The (possibly modified) response returned by - `post_get_index` will be passed to - `post_get_index_with_metadata`. 
- """ - return response, metadata - - def pre_get_user_creds( - self, - request: firestore_admin.GetUserCredsRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.GetUserCredsRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for get_user_creds - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_get_user_creds( - self, response: user_creds.UserCreds - ) -> user_creds.UserCreds: - """Post-rpc interceptor for get_user_creds - - DEPRECATED. Please use the `post_get_user_creds_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. This `post_get_user_creds` interceptor runs - before the `post_get_user_creds_with_metadata` interceptor. - """ - return response - - def post_get_user_creds_with_metadata( - self, - response: user_creds.UserCreds, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[user_creds.UserCreds, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_user_creds - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the FirestoreAdmin server but before it is returned to user code. - - We recommend only using this `post_get_user_creds_with_metadata` - interceptor in new development instead of the `post_get_user_creds` interceptor. - When both interceptors are used, this `post_get_user_creds_with_metadata` interceptor runs after the - `post_get_user_creds` interceptor. The (possibly modified) response returned by - `post_get_user_creds` will be passed to - `post_get_user_creds_with_metadata`. 
- """ - return response, metadata - - def pre_import_documents( - self, - request: firestore_admin.ImportDocumentsRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.ImportDocumentsRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for import_documents - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_import_documents( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for import_documents - - DEPRECATED. Please use the `post_import_documents_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. This `post_import_documents` interceptor runs - before the `post_import_documents_with_metadata` interceptor. - """ - return response - - def post_import_documents_with_metadata( - self, - response: operations_pb2.Operation, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for import_documents - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the FirestoreAdmin server but before it is returned to user code. - - We recommend only using this `post_import_documents_with_metadata` - interceptor in new development instead of the `post_import_documents` interceptor. - When both interceptors are used, this `post_import_documents_with_metadata` interceptor runs after the - `post_import_documents` interceptor. The (possibly modified) response returned by - `post_import_documents` will be passed to - `post_import_documents_with_metadata`. 
- """ - return response, metadata - - def pre_list_backups( - self, - request: firestore_admin.ListBackupsRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.ListBackupsRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for list_backups - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_list_backups( - self, response: firestore_admin.ListBackupsResponse - ) -> firestore_admin.ListBackupsResponse: - """Post-rpc interceptor for list_backups - - DEPRECATED. Please use the `post_list_backups_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. This `post_list_backups` interceptor runs - before the `post_list_backups_with_metadata` interceptor. - """ - return response - - def post_list_backups_with_metadata( - self, - response: firestore_admin.ListBackupsResponse, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.ListBackupsResponse, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Post-rpc interceptor for list_backups - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the FirestoreAdmin server but before it is returned to user code. - - We recommend only using this `post_list_backups_with_metadata` - interceptor in new development instead of the `post_list_backups` interceptor. - When both interceptors are used, this `post_list_backups_with_metadata` interceptor runs after the - `post_list_backups` interceptor. The (possibly modified) response returned by - `post_list_backups` will be passed to - `post_list_backups_with_metadata`. 
- """ - return response, metadata - - def pre_list_backup_schedules( - self, - request: firestore_admin.ListBackupSchedulesRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.ListBackupSchedulesRequest, - Sequence[Tuple[str, Union[str, bytes]]], - ]: - """Pre-rpc interceptor for list_backup_schedules - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_list_backup_schedules( - self, response: firestore_admin.ListBackupSchedulesResponse - ) -> firestore_admin.ListBackupSchedulesResponse: - """Post-rpc interceptor for list_backup_schedules - - DEPRECATED. Please use the `post_list_backup_schedules_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. This `post_list_backup_schedules` interceptor runs - before the `post_list_backup_schedules_with_metadata` interceptor. - """ - return response - - def post_list_backup_schedules_with_metadata( - self, - response: firestore_admin.ListBackupSchedulesResponse, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.ListBackupSchedulesResponse, - Sequence[Tuple[str, Union[str, bytes]]], - ]: - """Post-rpc interceptor for list_backup_schedules - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the FirestoreAdmin server but before it is returned to user code. - - We recommend only using this `post_list_backup_schedules_with_metadata` - interceptor in new development instead of the `post_list_backup_schedules` interceptor. - When both interceptors are used, this `post_list_backup_schedules_with_metadata` interceptor runs after the - `post_list_backup_schedules` interceptor. 
The (possibly modified) response returned by - `post_list_backup_schedules` will be passed to - `post_list_backup_schedules_with_metadata`. - """ - return response, metadata - - def pre_list_databases( - self, - request: firestore_admin.ListDatabasesRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.ListDatabasesRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for list_databases - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_list_databases( - self, response: firestore_admin.ListDatabasesResponse - ) -> firestore_admin.ListDatabasesResponse: - """Post-rpc interceptor for list_databases - - DEPRECATED. Please use the `post_list_databases_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. This `post_list_databases` interceptor runs - before the `post_list_databases_with_metadata` interceptor. - """ - return response - - def post_list_databases_with_metadata( - self, - response: firestore_admin.ListDatabasesResponse, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.ListDatabasesResponse, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Post-rpc interceptor for list_databases - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the FirestoreAdmin server but before it is returned to user code. - - We recommend only using this `post_list_databases_with_metadata` - interceptor in new development instead of the `post_list_databases` interceptor. - When both interceptors are used, this `post_list_databases_with_metadata` interceptor runs after the - `post_list_databases` interceptor. 
The (possibly modified) response returned by - `post_list_databases` will be passed to - `post_list_databases_with_metadata`. - """ - return response, metadata - - def pre_list_fields( - self, - request: firestore_admin.ListFieldsRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.ListFieldsRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for list_fields - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_list_fields( - self, response: firestore_admin.ListFieldsResponse - ) -> firestore_admin.ListFieldsResponse: - """Post-rpc interceptor for list_fields - - DEPRECATED. Please use the `post_list_fields_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. This `post_list_fields` interceptor runs - before the `post_list_fields_with_metadata` interceptor. - """ - return response - - def post_list_fields_with_metadata( - self, - response: firestore_admin.ListFieldsResponse, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.ListFieldsResponse, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Post-rpc interceptor for list_fields - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the FirestoreAdmin server but before it is returned to user code. - - We recommend only using this `post_list_fields_with_metadata` - interceptor in new development instead of the `post_list_fields` interceptor. - When both interceptors are used, this `post_list_fields_with_metadata` interceptor runs after the - `post_list_fields` interceptor. The (possibly modified) response returned by - `post_list_fields` will be passed to - `post_list_fields_with_metadata`. 
- """ - return response, metadata - - def pre_list_indexes( - self, - request: firestore_admin.ListIndexesRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.ListIndexesRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for list_indexes - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_list_indexes( - self, response: firestore_admin.ListIndexesResponse - ) -> firestore_admin.ListIndexesResponse: - """Post-rpc interceptor for list_indexes - - DEPRECATED. Please use the `post_list_indexes_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. This `post_list_indexes` interceptor runs - before the `post_list_indexes_with_metadata` interceptor. - """ - return response - - def post_list_indexes_with_metadata( - self, - response: firestore_admin.ListIndexesResponse, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.ListIndexesResponse, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Post-rpc interceptor for list_indexes - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the FirestoreAdmin server but before it is returned to user code. - - We recommend only using this `post_list_indexes_with_metadata` - interceptor in new development instead of the `post_list_indexes` interceptor. - When both interceptors are used, this `post_list_indexes_with_metadata` interceptor runs after the - `post_list_indexes` interceptor. The (possibly modified) response returned by - `post_list_indexes` will be passed to - `post_list_indexes_with_metadata`. 
- """ - return response, metadata - - def pre_list_user_creds( - self, - request: firestore_admin.ListUserCredsRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.ListUserCredsRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for list_user_creds - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_list_user_creds( - self, response: firestore_admin.ListUserCredsResponse - ) -> firestore_admin.ListUserCredsResponse: - """Post-rpc interceptor for list_user_creds - - DEPRECATED. Please use the `post_list_user_creds_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. This `post_list_user_creds` interceptor runs - before the `post_list_user_creds_with_metadata` interceptor. - """ - return response - - def post_list_user_creds_with_metadata( - self, - response: firestore_admin.ListUserCredsResponse, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.ListUserCredsResponse, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Post-rpc interceptor for list_user_creds - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the FirestoreAdmin server but before it is returned to user code. - - We recommend only using this `post_list_user_creds_with_metadata` - interceptor in new development instead of the `post_list_user_creds` interceptor. - When both interceptors are used, this `post_list_user_creds_with_metadata` interceptor runs after the - `post_list_user_creds` interceptor. The (possibly modified) response returned by - `post_list_user_creds` will be passed to - `post_list_user_creds_with_metadata`. 
- """ - return response, metadata - - def pre_reset_user_password( - self, - request: firestore_admin.ResetUserPasswordRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.ResetUserPasswordRequest, - Sequence[Tuple[str, Union[str, bytes]]], - ]: - """Pre-rpc interceptor for reset_user_password - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_reset_user_password( - self, response: user_creds.UserCreds - ) -> user_creds.UserCreds: - """Post-rpc interceptor for reset_user_password - - DEPRECATED. Please use the `post_reset_user_password_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. This `post_reset_user_password` interceptor runs - before the `post_reset_user_password_with_metadata` interceptor. - """ - return response - - def post_reset_user_password_with_metadata( - self, - response: user_creds.UserCreds, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[user_creds.UserCreds, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for reset_user_password - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the FirestoreAdmin server but before it is returned to user code. - - We recommend only using this `post_reset_user_password_with_metadata` - interceptor in new development instead of the `post_reset_user_password` interceptor. - When both interceptors are used, this `post_reset_user_password_with_metadata` interceptor runs after the - `post_reset_user_password` interceptor. The (possibly modified) response returned by - `post_reset_user_password` will be passed to - `post_reset_user_password_with_metadata`. 
- """ - return response, metadata - - def pre_restore_database( - self, - request: firestore_admin.RestoreDatabaseRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.RestoreDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for restore_database - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_restore_database( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for restore_database - - DEPRECATED. Please use the `post_restore_database_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. This `post_restore_database` interceptor runs - before the `post_restore_database_with_metadata` interceptor. - """ - return response - - def post_restore_database_with_metadata( - self, - response: operations_pb2.Operation, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for restore_database - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the FirestoreAdmin server but before it is returned to user code. - - We recommend only using this `post_restore_database_with_metadata` - interceptor in new development instead of the `post_restore_database` interceptor. - When both interceptors are used, this `post_restore_database_with_metadata` interceptor runs after the - `post_restore_database` interceptor. The (possibly modified) response returned by - `post_restore_database` will be passed to - `post_restore_database_with_metadata`. 
- """ - return response, metadata - - def pre_update_backup_schedule( - self, - request: firestore_admin.UpdateBackupScheduleRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.UpdateBackupScheduleRequest, - Sequence[Tuple[str, Union[str, bytes]]], - ]: - """Pre-rpc interceptor for update_backup_schedule - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_update_backup_schedule( - self, response: schedule.BackupSchedule - ) -> schedule.BackupSchedule: - """Post-rpc interceptor for update_backup_schedule - - DEPRECATED. Please use the `post_update_backup_schedule_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. This `post_update_backup_schedule` interceptor runs - before the `post_update_backup_schedule_with_metadata` interceptor. - """ - return response - - def post_update_backup_schedule_with_metadata( - self, - response: schedule.BackupSchedule, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[schedule.BackupSchedule, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_backup_schedule - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the FirestoreAdmin server but before it is returned to user code. - - We recommend only using this `post_update_backup_schedule_with_metadata` - interceptor in new development instead of the `post_update_backup_schedule` interceptor. - When both interceptors are used, this `post_update_backup_schedule_with_metadata` interceptor runs after the - `post_update_backup_schedule` interceptor. The (possibly modified) response returned by - `post_update_backup_schedule` will be passed to - `post_update_backup_schedule_with_metadata`. 
- """ - return response, metadata - - def pre_update_database( - self, - request: firestore_admin.UpdateDatabaseRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.UpdateDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for update_database - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_update_database( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for update_database - - DEPRECATED. Please use the `post_update_database_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. This `post_update_database` interceptor runs - before the `post_update_database_with_metadata` interceptor. - """ - return response - - def post_update_database_with_metadata( - self, - response: operations_pb2.Operation, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_database - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the FirestoreAdmin server but before it is returned to user code. - - We recommend only using this `post_update_database_with_metadata` - interceptor in new development instead of the `post_update_database` interceptor. - When both interceptors are used, this `post_update_database_with_metadata` interceptor runs after the - `post_update_database` interceptor. The (possibly modified) response returned by - `post_update_database` will be passed to - `post_update_database_with_metadata`. 
- """ - return response, metadata - - def pre_update_field( - self, - request: firestore_admin.UpdateFieldRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore_admin.UpdateFieldRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for update_field - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_update_field( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for update_field - - DEPRECATED. Please use the `post_update_field_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. This `post_update_field` interceptor runs - before the `post_update_field_with_metadata` interceptor. - """ - return response - - def post_update_field_with_metadata( - self, - response: operations_pb2.Operation, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_field - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the FirestoreAdmin server but before it is returned to user code. - - We recommend only using this `post_update_field_with_metadata` - interceptor in new development instead of the `post_update_field` interceptor. - When both interceptors are used, this `post_update_field_with_metadata` interceptor runs after the - `post_update_field` interceptor. The (possibly modified) response returned by - `post_update_field` will be passed to - `post_update_field_with_metadata`. 
- """ - return response, metadata - - def pre_cancel_operation( - self, - request: operations_pb2.CancelOperationRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_cancel_operation(self, response: None) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. - """ - return response - - def pre_delete_operation( - self, - request: operations_pb2.DeleteOperationRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_delete_operation(self, response: None) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, - request: operations_pb2.GetOperationRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. 
- """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. - """ - return response - - def pre_list_operations( - self, - request: operations_pb2.ListOperationsRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the FirestoreAdmin server. - """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the FirestoreAdmin server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class FirestoreAdminRestStub: - _session: AuthorizedSession - _host: str - _interceptor: FirestoreAdminRestInterceptor - - -class FirestoreAdminRestTransport(_BaseFirestoreAdminRestTransport): - """REST backend synchronous transport for FirestoreAdmin. - - The Cloud Firestore Admin API. - - This API provides several administrative services for Cloud - Firestore. - - Project, Database, Namespace, Collection, Collection Group, and - Document are used as defined in the Google Cloud Firestore API. - - Operation: An Operation represents work being performed in the - background. - - The index service manages Cloud Firestore indexes. - - Index creation is performed asynchronously. An Operation resource is - created for each such asynchronous operation. 
The state of the - operation (including any errors encountered) may be queried via the - Operation resource. - - The Operations collection provides a record of actions performed for - the specified Project (including any Operations in progress). - Operations are not created directly but through calls on other - collections or resources. - - An Operation that is done may be deleted so that it is no longer - listed as part of the Operation collection. Operations are garbage - collected after 30 days. By default, ListOperations will only return - in progress and failed operations. To list completed operation, - issue a ListOperations request with the filter ``done: true``. - - Operations are created by service ``FirestoreAdmin``, but are - accessed via service ``google.longrunning.Operations``. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__( - self, - *, - host: str = "firestore.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = "https", - interceptor: Optional[FirestoreAdminRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'firestore.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. This argument will be - removed in the next major version of this library. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience, - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST - ) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or FirestoreAdminRestInterceptor() - self._prep_wrapped_messages(client_info) - - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Only create a new client if we do not already have one. - if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - "google.longrunning.Operations.CancelOperation": [ - { - "method": "post", - "uri": "/v1/{name=projects/*/databases/*/operations/*}:cancel", - "body": "*", - }, - ], - "google.longrunning.Operations.DeleteOperation": [ - { - "method": "delete", - "uri": "/v1/{name=projects/*/databases/*/operations/*}", - }, - ], - "google.longrunning.Operations.GetOperation": [ - { - "method": "get", - "uri": "/v1/{name=projects/*/databases/*/operations/*}", - }, - ], - "google.longrunning.Operations.ListOperations": [ - { - "method": "get", - "uri": "/v1/{name=projects/*/databases/*}/operations", - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1", - ) - - self._operations_client = operations_v1.AbstractOperationsClient( - 
transport=rest_transport - ) - - # Return the client from cache. - return self._operations_client - - class _BulkDeleteDocuments( - _BaseFirestoreAdminRestTransport._BaseBulkDeleteDocuments, - FirestoreAdminRestStub, - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.BulkDeleteDocuments") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__( - self, - request: firestore_admin.BulkDeleteDocumentsRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Call the bulk delete documents method over HTTP. - - Args: - request (~.firestore_admin.BulkDeleteDocumentsRequest): - The request object. The request for - [FirestoreAdmin.BulkDeleteDocuments][google.firestore.admin.v1.FirestoreAdmin.BulkDeleteDocuments]. - - When both collection_ids and namespace_ids are set, only - documents satisfying both conditions will be deleted. - - Requests with namespace_ids and collection_ids both - empty will be rejected. Please use - [FirestoreAdmin.DeleteDatabase][google.firestore.admin.v1.FirestoreAdmin.DeleteDatabase] - instead. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseBulkDeleteDocuments._get_http_options() - ) - - request, metadata = self._interceptor.pre_bulk_delete_documents( - request, metadata - ) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseBulkDeleteDocuments._get_transcoded_request( - http_options, request - ) - - body = _BaseFirestoreAdminRestTransport._BaseBulkDeleteDocuments._get_request_body_json( - transcoded_request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseBulkDeleteDocuments._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.BulkDeleteDocuments", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "BulkDeleteDocuments", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._BulkDeleteDocuments._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_bulk_delete_documents(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_bulk_delete_documents_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore.admin_v1.FirestoreAdminClient.bulk_delete_documents", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "BulkDeleteDocuments", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CloneDatabase( - _BaseFirestoreAdminRestTransport._BaseCloneDatabase, FirestoreAdminRestStub - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.CloneDatabase") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__( - self, - request: firestore_admin.CloneDatabaseRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, 
Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Call the clone database method over HTTP. - - Args: - request (~.firestore_admin.CloneDatabaseRequest): - The request object. The request message for - [FirestoreAdmin.CloneDatabase][google.firestore.admin.v1.FirestoreAdmin.CloneDatabase]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseCloneDatabase._get_http_options() - ) - - request, metadata = self._interceptor.pre_clone_database(request, metadata) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseCloneDatabase._get_transcoded_request( - http_options, request - ) - - body = _BaseFirestoreAdminRestTransport._BaseCloneDatabase._get_request_body_json( - transcoded_request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseCloneDatabase._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for 
google.firestore.admin_v1.FirestoreAdminClient.CloneDatabase", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "CloneDatabase", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._CloneDatabase._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_clone_database(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_clone_database_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore.admin_v1.FirestoreAdminClient.clone_database", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "CloneDatabase", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateBackupSchedule( - _BaseFirestoreAdminRestTransport._BaseCreateBackupSchedule, - FirestoreAdminRestStub, - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.CreateBackupSchedule") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - 
method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__( - self, - request: firestore_admin.CreateBackupScheduleRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> schedule.BackupSchedule: - r"""Call the create backup schedule method over HTTP. - - Args: - request (~.firestore_admin.CreateBackupScheduleRequest): - The request object. The request for - [FirestoreAdmin.CreateBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.CreateBackupSchedule]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.schedule.BackupSchedule: - A backup schedule for a Cloud - Firestore Database. - This resource is owned by the database - it is backing up, and is deleted along - with the database. The actual backups - are not though. 
- - """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseCreateBackupSchedule._get_http_options() - ) - - request, metadata = self._interceptor.pre_create_backup_schedule( - request, metadata - ) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseCreateBackupSchedule._get_transcoded_request( - http_options, request - ) - - body = _BaseFirestoreAdminRestTransport._BaseCreateBackupSchedule._get_request_body_json( - transcoded_request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseCreateBackupSchedule._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.CreateBackupSchedule", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "CreateBackupSchedule", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._CreateBackupSchedule._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = schedule.BackupSchedule() - pb_resp = schedule.BackupSchedule.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_backup_schedule(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_backup_schedule_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = schedule.BackupSchedule.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore.admin_v1.FirestoreAdminClient.create_backup_schedule", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "CreateBackupSchedule", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateDatabase( - _BaseFirestoreAdminRestTransport._BaseCreateDatabase, FirestoreAdminRestStub - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.CreateDatabase") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__( - self, - request: firestore_admin.CreateDatabaseRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - 
timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Call the create database method over HTTP. - - Args: - request (~.firestore_admin.CreateDatabaseRequest): - The request object. The request for - [FirestoreAdmin.CreateDatabase][google.firestore.admin.v1.FirestoreAdmin.CreateDatabase]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseCreateDatabase._get_http_options() - ) - - request, metadata = self._interceptor.pre_create_database(request, metadata) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseCreateDatabase._get_transcoded_request( - http_options, request - ) - - body = _BaseFirestoreAdminRestTransport._BaseCreateDatabase._get_request_body_json( - transcoded_request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseCreateDatabase._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - 
f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.CreateDatabase", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "CreateDatabase", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._CreateDatabase._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_database(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_database_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore.admin_v1.FirestoreAdminClient.create_database", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "CreateDatabase", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateIndex( - _BaseFirestoreAdminRestTransport._BaseCreateIndex, FirestoreAdminRestStub - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.CreateIndex") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - 
method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__( - self, - request: firestore_admin.CreateIndexRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Call the create index method over HTTP. - - Args: - request (~.firestore_admin.CreateIndexRequest): - The request object. The request for - [FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseCreateIndex._get_http_options() - ) - - request, metadata = self._interceptor.pre_create_index(request, metadata) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseCreateIndex._get_transcoded_request( - http_options, request - ) - - body = _BaseFirestoreAdminRestTransport._BaseCreateIndex._get_request_body_json( - transcoded_request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseCreateIndex._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.CreateIndex", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "CreateIndex", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._CreateIndex._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_index(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_index_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore.admin_v1.FirestoreAdminClient.create_index", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "CreateIndex", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateUserCreds( - _BaseFirestoreAdminRestTransport._BaseCreateUserCreds, FirestoreAdminRestStub - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.CreateUserCreds") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__( - self, - request: firestore_admin.CreateUserCredsRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> 
gfa_user_creds.UserCreds: - r"""Call the create user creds method over HTTP. - - Args: - request (~.firestore_admin.CreateUserCredsRequest): - The request object. The request for - [FirestoreAdmin.CreateUserCreds][google.firestore.admin.v1.FirestoreAdmin.CreateUserCreds]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.gfa_user_creds.UserCreds: - A Cloud Firestore User Creds. - """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseCreateUserCreds._get_http_options() - ) - - request, metadata = self._interceptor.pre_create_user_creds( - request, metadata - ) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseCreateUserCreds._get_transcoded_request( - http_options, request - ) - - body = _BaseFirestoreAdminRestTransport._BaseCreateUserCreds._get_request_body_json( - transcoded_request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseCreateUserCreds._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.CreateUserCreds", - extra={ - "serviceName": 
"google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "CreateUserCreds", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._CreateUserCreds._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = gfa_user_creds.UserCreds() - pb_resp = gfa_user_creds.UserCreds.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_user_creds(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_user_creds_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = gfa_user_creds.UserCreds.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore.admin_v1.FirestoreAdminClient.create_user_creds", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "CreateUserCreds", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteBackup( - _BaseFirestoreAdminRestTransport._BaseDeleteBackup, FirestoreAdminRestStub - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.DeleteBackup") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - 
headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__( - self, - request: firestore_admin.DeleteBackupRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ): - r"""Call the delete backup method over HTTP. - - Args: - request (~.firestore_admin.DeleteBackupRequest): - The request object. The request for - [FirestoreAdmin.DeleteBackup][google.firestore.admin.v1.FirestoreAdmin.DeleteBackup]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseDeleteBackup._get_http_options() - ) - - request, metadata = self._interceptor.pre_delete_backup(request, metadata) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseDeleteBackup._get_transcoded_request( - http_options, request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseDeleteBackup._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.DeleteBackup", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "DeleteBackup", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._DeleteBackup._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteBackupSchedule( - _BaseFirestoreAdminRestTransport._BaseDeleteBackupSchedule, - FirestoreAdminRestStub, - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.DeleteBackupSchedule") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__( - self, - request: firestore_admin.DeleteBackupScheduleRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ): - r"""Call the delete backup schedule method over HTTP. - - Args: - request (~.firestore_admin.DeleteBackupScheduleRequest): - The request object. The request for - [FirestoreAdmin.DeleteBackupSchedules][]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseDeleteBackupSchedule._get_http_options() - ) - - request, metadata = self._interceptor.pre_delete_backup_schedule( - request, metadata - ) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseDeleteBackupSchedule._get_transcoded_request( - http_options, request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseDeleteBackupSchedule._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.DeleteBackupSchedule", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "DeleteBackupSchedule", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._DeleteBackupSchedule._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteDatabase( - _BaseFirestoreAdminRestTransport._BaseDeleteDatabase, FirestoreAdminRestStub - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.DeleteDatabase") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__( - self, - request: firestore_admin.DeleteDatabaseRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Call the delete database method over HTTP. - - Args: - request (~.firestore_admin.DeleteDatabaseRequest): - The request object. The request for - [FirestoreAdmin.DeleteDatabase][google.firestore.admin.v1.FirestoreAdmin.DeleteDatabase]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseDeleteDatabase._get_http_options() - ) - - request, metadata = self._interceptor.pre_delete_database(request, metadata) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseDeleteDatabase._get_transcoded_request( - http_options, request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseDeleteDatabase._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.DeleteDatabase", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "DeleteDatabase", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._DeleteDatabase._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_database(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_database_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore.admin_v1.FirestoreAdminClient.delete_database", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "DeleteDatabase", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteIndex( - _BaseFirestoreAdminRestTransport._BaseDeleteIndex, FirestoreAdminRestStub - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.DeleteIndex") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__( - self, - request: firestore_admin.DeleteIndexRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ): - r"""Call the delete 
index method over HTTP. - - Args: - request (~.firestore_admin.DeleteIndexRequest): - The request object. The request for - [FirestoreAdmin.DeleteIndex][google.firestore.admin.v1.FirestoreAdmin.DeleteIndex]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseDeleteIndex._get_http_options() - ) - - request, metadata = self._interceptor.pre_delete_index(request, metadata) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseDeleteIndex._get_transcoded_request( - http_options, request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseDeleteIndex._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.DeleteIndex", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "DeleteIndex", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._DeleteIndex._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - 
transcoded_request, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteUserCreds( - _BaseFirestoreAdminRestTransport._BaseDeleteUserCreds, FirestoreAdminRestStub - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.DeleteUserCreds") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__( - self, - request: firestore_admin.DeleteUserCredsRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ): - r"""Call the delete user creds method over HTTP. - - Args: - request (~.firestore_admin.DeleteUserCredsRequest): - The request object. The request for - [FirestoreAdmin.DeleteUserCreds][google.firestore.admin.v1.FirestoreAdmin.DeleteUserCreds]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseDeleteUserCreds._get_http_options() - ) - - request, metadata = self._interceptor.pre_delete_user_creds( - request, metadata - ) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseDeleteUserCreds._get_transcoded_request( - http_options, request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseDeleteUserCreds._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.DeleteUserCreds", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "DeleteUserCreds", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._DeleteUserCreds._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DisableUserCreds( - _BaseFirestoreAdminRestTransport._BaseDisableUserCreds, FirestoreAdminRestStub - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.DisableUserCreds") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__( - self, - request: firestore_admin.DisableUserCredsRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> user_creds.UserCreds: - r"""Call the disable user creds method over HTTP. - - Args: - request (~.firestore_admin.DisableUserCredsRequest): - The request object. The request for - [FirestoreAdmin.DisableUserCreds][google.firestore.admin.v1.FirestoreAdmin.DisableUserCreds]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.user_creds.UserCreds: - A Cloud Firestore User Creds. 
- """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseDisableUserCreds._get_http_options() - ) - - request, metadata = self._interceptor.pre_disable_user_creds( - request, metadata - ) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseDisableUserCreds._get_transcoded_request( - http_options, request - ) - - body = _BaseFirestoreAdminRestTransport._BaseDisableUserCreds._get_request_body_json( - transcoded_request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseDisableUserCreds._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.DisableUserCreds", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "DisableUserCreds", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._DisableUserCreds._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = user_creds.UserCreds() - pb_resp = user_creds.UserCreds.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_disable_user_creds(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_disable_user_creds_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = user_creds.UserCreds.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore.admin_v1.FirestoreAdminClient.disable_user_creds", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "DisableUserCreds", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _EnableUserCreds( - _BaseFirestoreAdminRestTransport._BaseEnableUserCreds, FirestoreAdminRestStub - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.EnableUserCreds") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__( - self, - request: firestore_admin.EnableUserCredsRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = 
None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> user_creds.UserCreds: - r"""Call the enable user creds method over HTTP. - - Args: - request (~.firestore_admin.EnableUserCredsRequest): - The request object. The request for - [FirestoreAdmin.EnableUserCreds][google.firestore.admin.v1.FirestoreAdmin.EnableUserCreds]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.user_creds.UserCreds: - A Cloud Firestore User Creds. - """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseEnableUserCreds._get_http_options() - ) - - request, metadata = self._interceptor.pre_enable_user_creds( - request, metadata - ) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseEnableUserCreds._get_transcoded_request( - http_options, request - ) - - body = _BaseFirestoreAdminRestTransport._BaseEnableUserCreds._get_request_body_json( - transcoded_request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseEnableUserCreds._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.EnableUserCreds", - 
extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "EnableUserCreds", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._EnableUserCreds._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = user_creds.UserCreds() - pb_resp = user_creds.UserCreds.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_enable_user_creds(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_enable_user_creds_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = user_creds.UserCreds.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore.admin_v1.FirestoreAdminClient.enable_user_creds", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "EnableUserCreds", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ExportDocuments( - _BaseFirestoreAdminRestTransport._BaseExportDocuments, FirestoreAdminRestStub - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.ExportDocuments") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = 
transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__( - self, - request: firestore_admin.ExportDocumentsRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Call the export documents method over HTTP. - - Args: - request (~.firestore_admin.ExportDocumentsRequest): - The request object. The request for - [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseExportDocuments._get_http_options() - ) - - request, metadata = self._interceptor.pre_export_documents( - request, metadata - ) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseExportDocuments._get_transcoded_request( - http_options, request - ) - - body = _BaseFirestoreAdminRestTransport._BaseExportDocuments._get_request_body_json( - transcoded_request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseExportDocuments._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.ExportDocuments", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "ExportDocuments", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._ExportDocuments._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_export_documents(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_export_documents_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore.admin_v1.FirestoreAdminClient.export_documents", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "ExportDocuments", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetBackup( - _BaseFirestoreAdminRestTransport._BaseGetBackup, FirestoreAdminRestStub - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.GetBackup") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__( - self, - request: firestore_admin.GetBackupRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> backup.Backup: - 
r"""Call the get backup method over HTTP. - - Args: - request (~.firestore_admin.GetBackupRequest): - The request object. The request for - [FirestoreAdmin.GetBackup][google.firestore.admin.v1.FirestoreAdmin.GetBackup]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.backup.Backup: - A Backup of a Cloud Firestore - Database. - The backup contains all documents and - index configurations for the given - database at a specific point in time. - - """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseGetBackup._get_http_options() - ) - - request, metadata = self._interceptor.pre_get_backup(request, metadata) - transcoded_request = ( - _BaseFirestoreAdminRestTransport._BaseGetBackup._get_transcoded_request( - http_options, request - ) - ) - - # Jsonify the query params - query_params = ( - _BaseFirestoreAdminRestTransport._BaseGetBackup._get_query_params_json( - transcoded_request - ) - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.GetBackup", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "GetBackup", - "httpRequest": http_request, - 
"metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._GetBackup._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = backup.Backup() - pb_resp = backup.Backup.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_backup(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_backup_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = backup.Backup.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore.admin_v1.FirestoreAdminClient.get_backup", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "GetBackup", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetBackupSchedule( - _BaseFirestoreAdminRestTransport._BaseGetBackupSchedule, FirestoreAdminRestStub - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.GetBackupSchedule") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - 
timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__( - self, - request: firestore_admin.GetBackupScheduleRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> schedule.BackupSchedule: - r"""Call the get backup schedule method over HTTP. - - Args: - request (~.firestore_admin.GetBackupScheduleRequest): - The request object. The request for - [FirestoreAdmin.GetBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.GetBackupSchedule]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.schedule.BackupSchedule: - A backup schedule for a Cloud - Firestore Database. - This resource is owned by the database - it is backing up, and is deleted along - with the database. The actual backups - are not though. 
- - """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseGetBackupSchedule._get_http_options() - ) - - request, metadata = self._interceptor.pre_get_backup_schedule( - request, metadata - ) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseGetBackupSchedule._get_transcoded_request( - http_options, request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseGetBackupSchedule._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.GetBackupSchedule", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "GetBackupSchedule", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._GetBackupSchedule._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = schedule.BackupSchedule() - pb_resp = schedule.BackupSchedule.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_backup_schedule(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_backup_schedule_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = schedule.BackupSchedule.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore.admin_v1.FirestoreAdminClient.get_backup_schedule", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "GetBackupSchedule", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetDatabase( - _BaseFirestoreAdminRestTransport._BaseGetDatabase, FirestoreAdminRestStub - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.GetDatabase") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__( - self, - request: firestore_admin.GetDatabaseRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - 
metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> database.Database: - r"""Call the get database method over HTTP. - - Args: - request (~.firestore_admin.GetDatabaseRequest): - The request object. The request for - [FirestoreAdmin.GetDatabase][google.firestore.admin.v1.FirestoreAdmin.GetDatabase]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.database.Database: - A Cloud Firestore Database. - """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseGetDatabase._get_http_options() - ) - - request, metadata = self._interceptor.pre_get_database(request, metadata) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseGetDatabase._get_transcoded_request( - http_options, request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseGetDatabase._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.GetDatabase", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "GetDatabase", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - 
- # Send the request - response = FirestoreAdminRestTransport._GetDatabase._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = database.Database() - pb_resp = database.Database.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_database(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_database_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = database.Database.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore.admin_v1.FirestoreAdminClient.get_database", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "GetDatabase", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetField( - _BaseFirestoreAdminRestTransport._BaseGetField, FirestoreAdminRestStub - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.GetField") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - 
params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__( - self, - request: firestore_admin.GetFieldRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> field.Field: - r"""Call the get field method over HTTP. - - Args: - request (~.firestore_admin.GetFieldRequest): - The request object. The request for - [FirestoreAdmin.GetField][google.firestore.admin.v1.FirestoreAdmin.GetField]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.field.Field: - Represents a single field in the - database. - Fields are grouped by their "Collection - Group", which represent all collections - in the database with the same ID. 
- - """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseGetField._get_http_options() - ) - - request, metadata = self._interceptor.pre_get_field(request, metadata) - transcoded_request = ( - _BaseFirestoreAdminRestTransport._BaseGetField._get_transcoded_request( - http_options, request - ) - ) - - # Jsonify the query params - query_params = ( - _BaseFirestoreAdminRestTransport._BaseGetField._get_query_params_json( - transcoded_request - ) - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.GetField", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "GetField", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._GetField._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = field.Field() - pb_resp = field.Field.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_field(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_field_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = field.Field.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore.admin_v1.FirestoreAdminClient.get_field", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "GetField", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetIndex( - _BaseFirestoreAdminRestTransport._BaseGetIndex, FirestoreAdminRestStub - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.GetIndex") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__( - self, - request: firestore_admin.GetIndexRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> index.Index: - r"""Call the get 
index method over HTTP. - - Args: - request (~.firestore_admin.GetIndexRequest): - The request object. The request for - [FirestoreAdmin.GetIndex][google.firestore.admin.v1.FirestoreAdmin.GetIndex]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.index.Index: - Cloud Firestore indexes enable simple - and complex queries against documents in - a database. - - """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseGetIndex._get_http_options() - ) - - request, metadata = self._interceptor.pre_get_index(request, metadata) - transcoded_request = ( - _BaseFirestoreAdminRestTransport._BaseGetIndex._get_transcoded_request( - http_options, request - ) - ) - - # Jsonify the query params - query_params = ( - _BaseFirestoreAdminRestTransport._BaseGetIndex._get_query_params_json( - transcoded_request - ) - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.GetIndex", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "GetIndex", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = 
FirestoreAdminRestTransport._GetIndex._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = index.Index() - pb_resp = index.Index.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_index(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_index_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = index.Index.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore.admin_v1.FirestoreAdminClient.get_index", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "GetIndex", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetUserCreds( - _BaseFirestoreAdminRestTransport._BaseGetUserCreds, FirestoreAdminRestStub - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.GetUserCreds") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return 
response - - def __call__( - self, - request: firestore_admin.GetUserCredsRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> user_creds.UserCreds: - r"""Call the get user creds method over HTTP. - - Args: - request (~.firestore_admin.GetUserCredsRequest): - The request object. The request for - [FirestoreAdmin.GetUserCreds][google.firestore.admin.v1.FirestoreAdmin.GetUserCreds]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.user_creds.UserCreds: - A Cloud Firestore User Creds. - """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseGetUserCreds._get_http_options() - ) - - request, metadata = self._interceptor.pre_get_user_creds(request, metadata) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseGetUserCreds._get_transcoded_request( - http_options, request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseGetUserCreds._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for 
google.firestore.admin_v1.FirestoreAdminClient.GetUserCreds", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "GetUserCreds", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._GetUserCreds._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = user_creds.UserCreds() - pb_resp = user_creds.UserCreds.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_user_creds(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_user_creds_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = user_creds.UserCreds.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore.admin_v1.FirestoreAdminClient.get_user_creds", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "GetUserCreds", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ImportDocuments( - _BaseFirestoreAdminRestTransport._BaseImportDocuments, FirestoreAdminRestStub - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.ImportDocuments") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = 
transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__( - self, - request: firestore_admin.ImportDocumentsRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Call the import documents method over HTTP. - - Args: - request (~.firestore_admin.ImportDocumentsRequest): - The request object. The request for - [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseImportDocuments._get_http_options() - ) - - request, metadata = self._interceptor.pre_import_documents( - request, metadata - ) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseImportDocuments._get_transcoded_request( - http_options, request - ) - - body = _BaseFirestoreAdminRestTransport._BaseImportDocuments._get_request_body_json( - transcoded_request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseImportDocuments._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.ImportDocuments", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "ImportDocuments", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._ImportDocuments._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_import_documents(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_import_documents_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore.admin_v1.FirestoreAdminClient.import_documents", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "ImportDocuments", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListBackups( - _BaseFirestoreAdminRestTransport._BaseListBackups, FirestoreAdminRestStub - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.ListBackups") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__( - self, - request: firestore_admin.ListBackupsRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> 
firestore_admin.ListBackupsResponse: - r"""Call the list backups method over HTTP. - - Args: - request (~.firestore_admin.ListBackupsRequest): - The request object. The request for - [FirestoreAdmin.ListBackups][google.firestore.admin.v1.FirestoreAdmin.ListBackups]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.firestore_admin.ListBackupsResponse: - The response for - [FirestoreAdmin.ListBackups][google.firestore.admin.v1.FirestoreAdmin.ListBackups]. - - """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseListBackups._get_http_options() - ) - - request, metadata = self._interceptor.pre_list_backups(request, metadata) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseListBackups._get_transcoded_request( - http_options, request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseListBackups._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.ListBackups", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "ListBackups", - "httpRequest": http_request, 
- "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._ListBackups._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = firestore_admin.ListBackupsResponse() - pb_resp = firestore_admin.ListBackupsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_backups(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_backups_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = firestore_admin.ListBackupsResponse.to_json( - response - ) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore.admin_v1.FirestoreAdminClient.list_backups", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "ListBackups", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListBackupSchedules( - _BaseFirestoreAdminRestTransport._BaseListBackupSchedules, - FirestoreAdminRestStub, - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.ListBackupSchedules") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - 
response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__( - self, - request: firestore_admin.ListBackupSchedulesRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> firestore_admin.ListBackupSchedulesResponse: - r"""Call the list backup schedules method over HTTP. - - Args: - request (~.firestore_admin.ListBackupSchedulesRequest): - The request object. The request for - [FirestoreAdmin.ListBackupSchedules][google.firestore.admin.v1.FirestoreAdmin.ListBackupSchedules]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.firestore_admin.ListBackupSchedulesResponse: - The response for - [FirestoreAdmin.ListBackupSchedules][google.firestore.admin.v1.FirestoreAdmin.ListBackupSchedules]. 
- - """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseListBackupSchedules._get_http_options() - ) - - request, metadata = self._interceptor.pre_list_backup_schedules( - request, metadata - ) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseListBackupSchedules._get_transcoded_request( - http_options, request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseListBackupSchedules._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.ListBackupSchedules", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "ListBackupSchedules", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._ListBackupSchedules._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = firestore_admin.ListBackupSchedulesResponse() - pb_resp = firestore_admin.ListBackupSchedulesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_backup_schedules(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_backup_schedules_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = ( - firestore_admin.ListBackupSchedulesResponse.to_json(response) - ) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore.admin_v1.FirestoreAdminClient.list_backup_schedules", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "ListBackupSchedules", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListDatabases( - _BaseFirestoreAdminRestTransport._BaseListDatabases, FirestoreAdminRestStub - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.ListDatabases") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__( - self, - request: firestore_admin.ListDatabasesRequest, - *, - retry: 
OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> firestore_admin.ListDatabasesResponse: - r"""Call the list databases method over HTTP. - - Args: - request (~.firestore_admin.ListDatabasesRequest): - The request object. A request to list the Firestore - Databases in all locations for a - project. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.firestore_admin.ListDatabasesResponse: - The list of databases for a project. - """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseListDatabases._get_http_options() - ) - - request, metadata = self._interceptor.pre_list_databases(request, metadata) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseListDatabases._get_transcoded_request( - http_options, request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseListDatabases._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.ListDatabases", - extra={ - "serviceName": 
"google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "ListDatabases", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._ListDatabases._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = firestore_admin.ListDatabasesResponse() - pb_resp = firestore_admin.ListDatabasesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_databases(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_databases_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = firestore_admin.ListDatabasesResponse.to_json( - response - ) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore.admin_v1.FirestoreAdminClient.list_databases", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "ListDatabases", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListFields( - _BaseFirestoreAdminRestTransport._BaseListFields, FirestoreAdminRestStub - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.ListFields") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = 
transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__( - self, - request: firestore_admin.ListFieldsRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> firestore_admin.ListFieldsResponse: - r"""Call the list fields method over HTTP. - - Args: - request (~.firestore_admin.ListFieldsRequest): - The request object. The request for - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.firestore_admin.ListFieldsResponse: - The response for - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]. 
- - """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseListFields._get_http_options() - ) - - request, metadata = self._interceptor.pre_list_fields(request, metadata) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseListFields._get_transcoded_request( - http_options, request - ) - - # Jsonify the query params - query_params = ( - _BaseFirestoreAdminRestTransport._BaseListFields._get_query_params_json( - transcoded_request - ) - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.ListFields", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "ListFields", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._ListFields._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = firestore_admin.ListFieldsResponse() - pb_resp = firestore_admin.ListFieldsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_fields(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_fields_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = firestore_admin.ListFieldsResponse.to_json( - response - ) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore.admin_v1.FirestoreAdminClient.list_fields", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "ListFields", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListIndexes( - _BaseFirestoreAdminRestTransport._BaseListIndexes, FirestoreAdminRestStub - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.ListIndexes") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__( - self, - request: firestore_admin.ListIndexesRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - 
metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> firestore_admin.ListIndexesResponse: - r"""Call the list indexes method over HTTP. - - Args: - request (~.firestore_admin.ListIndexesRequest): - The request object. The request for - [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.firestore_admin.ListIndexesResponse: - The response for - [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes]. - - """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseListIndexes._get_http_options() - ) - - request, metadata = self._interceptor.pre_list_indexes(request, metadata) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseListIndexes._get_transcoded_request( - http_options, request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseListIndexes._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.ListIndexes", - extra={ - "serviceName": 
"google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "ListIndexes", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._ListIndexes._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = firestore_admin.ListIndexesResponse() - pb_resp = firestore_admin.ListIndexesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_indexes(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_indexes_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = firestore_admin.ListIndexesResponse.to_json( - response - ) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore.admin_v1.FirestoreAdminClient.list_indexes", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "ListIndexes", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListUserCreds( - _BaseFirestoreAdminRestTransport._BaseListUserCreds, FirestoreAdminRestStub - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.ListUserCreds") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = 
transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__( - self, - request: firestore_admin.ListUserCredsRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> firestore_admin.ListUserCredsResponse: - r"""Call the list user creds method over HTTP. - - Args: - request (~.firestore_admin.ListUserCredsRequest): - The request object. The request for - [FirestoreAdmin.ListUserCreds][google.firestore.admin.v1.FirestoreAdmin.ListUserCreds]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.firestore_admin.ListUserCredsResponse: - The response for - [FirestoreAdmin.ListUserCreds][google.firestore.admin.v1.FirestoreAdmin.ListUserCreds]. 
- - """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseListUserCreds._get_http_options() - ) - - request, metadata = self._interceptor.pre_list_user_creds(request, metadata) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseListUserCreds._get_transcoded_request( - http_options, request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseListUserCreds._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.ListUserCreds", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "ListUserCreds", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._ListUserCreds._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = firestore_admin.ListUserCredsResponse() - pb_resp = firestore_admin.ListUserCredsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_user_creds(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_user_creds_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = firestore_admin.ListUserCredsResponse.to_json( - response - ) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore.admin_v1.FirestoreAdminClient.list_user_creds", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "ListUserCreds", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ResetUserPassword( - _BaseFirestoreAdminRestTransport._BaseResetUserPassword, FirestoreAdminRestStub - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.ResetUserPassword") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__( - self, - request: firestore_admin.ResetUserPasswordRequest, - *, - retry: OptionalRetry = 
gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> user_creds.UserCreds: - r"""Call the reset user password method over HTTP. - - Args: - request (~.firestore_admin.ResetUserPasswordRequest): - The request object. The request for - [FirestoreAdmin.ResetUserPassword][google.firestore.admin.v1.FirestoreAdmin.ResetUserPassword]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.user_creds.UserCreds: - A Cloud Firestore User Creds. - """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseResetUserPassword._get_http_options() - ) - - request, metadata = self._interceptor.pre_reset_user_password( - request, metadata - ) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseResetUserPassword._get_transcoded_request( - http_options, request - ) - - body = _BaseFirestoreAdminRestTransport._BaseResetUserPassword._get_request_body_json( - transcoded_request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseResetUserPassword._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for 
google.firestore.admin_v1.FirestoreAdminClient.ResetUserPassword", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "ResetUserPassword", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._ResetUserPassword._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = user_creds.UserCreds() - pb_resp = user_creds.UserCreds.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_reset_user_password(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_reset_user_password_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = user_creds.UserCreds.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore.admin_v1.FirestoreAdminClient.reset_user_password", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "ResetUserPassword", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _RestoreDatabase( - _BaseFirestoreAdminRestTransport._BaseRestoreDatabase, FirestoreAdminRestStub - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.RestoreDatabase") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - 
transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__( - self, - request: firestore_admin.RestoreDatabaseRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Call the restore database method over HTTP. - - Args: - request (~.firestore_admin.RestoreDatabaseRequest): - The request object. The request message for - [FirestoreAdmin.RestoreDatabase][google.firestore.admin.v1.FirestoreAdmin.RestoreDatabase]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseRestoreDatabase._get_http_options() - ) - - request, metadata = self._interceptor.pre_restore_database( - request, metadata - ) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseRestoreDatabase._get_transcoded_request( - http_options, request - ) - - body = _BaseFirestoreAdminRestTransport._BaseRestoreDatabase._get_request_body_json( - transcoded_request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseRestoreDatabase._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.RestoreDatabase", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "RestoreDatabase", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._RestoreDatabase._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_restore_database(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_restore_database_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore.admin_v1.FirestoreAdminClient.restore_database", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "RestoreDatabase", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateBackupSchedule( - _BaseFirestoreAdminRestTransport._BaseUpdateBackupSchedule, - FirestoreAdminRestStub, - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.UpdateBackupSchedule") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__( - self, - request: firestore_admin.UpdateBackupScheduleRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: 
Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> schedule.BackupSchedule: - r"""Call the update backup schedule method over HTTP. - - Args: - request (~.firestore_admin.UpdateBackupScheduleRequest): - The request object. The request for - [FirestoreAdmin.UpdateBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.UpdateBackupSchedule]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.schedule.BackupSchedule: - A backup schedule for a Cloud - Firestore Database. - This resource is owned by the database - it is backing up, and is deleted along - with the database. The actual backups - are not though. 
- - """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseUpdateBackupSchedule._get_http_options() - ) - - request, metadata = self._interceptor.pre_update_backup_schedule( - request, metadata - ) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseUpdateBackupSchedule._get_transcoded_request( - http_options, request - ) - - body = _BaseFirestoreAdminRestTransport._BaseUpdateBackupSchedule._get_request_body_json( - transcoded_request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseUpdateBackupSchedule._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.UpdateBackupSchedule", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "UpdateBackupSchedule", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._UpdateBackupSchedule._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = schedule.BackupSchedule() - pb_resp = schedule.BackupSchedule.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_backup_schedule(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_backup_schedule_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = schedule.BackupSchedule.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore.admin_v1.FirestoreAdminClient.update_backup_schedule", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "UpdateBackupSchedule", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateDatabase( - _BaseFirestoreAdminRestTransport._BaseUpdateDatabase, FirestoreAdminRestStub - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.UpdateDatabase") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__( - self, - request: firestore_admin.UpdateDatabaseRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - 
timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Call the update database method over HTTP. - - Args: - request (~.firestore_admin.UpdateDatabaseRequest): - The request object. The request for - [FirestoreAdmin.UpdateDatabase][google.firestore.admin.v1.FirestoreAdmin.UpdateDatabase]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseUpdateDatabase._get_http_options() - ) - - request, metadata = self._interceptor.pre_update_database(request, metadata) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseUpdateDatabase._get_transcoded_request( - http_options, request - ) - - body = _BaseFirestoreAdminRestTransport._BaseUpdateDatabase._get_request_body_json( - transcoded_request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseUpdateDatabase._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - 
f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.UpdateDatabase", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "UpdateDatabase", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._UpdateDatabase._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_database(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_database_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore.admin_v1.FirestoreAdminClient.update_database", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "UpdateDatabase", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateField( - _BaseFirestoreAdminRestTransport._BaseUpdateField, FirestoreAdminRestStub - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.UpdateField") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - 
method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__( - self, - request: firestore_admin.UpdateFieldRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Call the update field method over HTTP. - - Args: - request (~.firestore_admin.UpdateFieldRequest): - The request object. The request for - [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseUpdateField._get_http_options() - ) - - request, metadata = self._interceptor.pre_update_field(request, metadata) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseUpdateField._get_transcoded_request( - http_options, request - ) - - body = _BaseFirestoreAdminRestTransport._BaseUpdateField._get_request_body_json( - transcoded_request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseUpdateField._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.UpdateField", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "UpdateField", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._UpdateField._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_field(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_field_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore.admin_v1.FirestoreAdminClient.update_field", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "UpdateField", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def bulk_delete_documents( - self, - ) -> Callable[ - [firestore_admin.BulkDeleteDocumentsRequest], operations_pb2.Operation - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._BulkDeleteDocuments(self._session, self._host, self._interceptor) # type: ignore - - @property - def clone_database( - self, - ) -> Callable[[firestore_admin.CloneDatabaseRequest], operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._CloneDatabase(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_backup_schedule( - self, - ) -> Callable[ - [firestore_admin.CreateBackupScheduleRequest], schedule.BackupSchedule - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateBackupSchedule(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_database( - self, - ) -> Callable[[firestore_admin.CreateDatabaseRequest], operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateDatabase(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_index( - self, - ) -> Callable[[firestore_admin.CreateIndexRequest], operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateIndex(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_user_creds( - self, - ) -> Callable[[firestore_admin.CreateUserCredsRequest], gfa_user_creds.UserCreds]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateUserCreds(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_backup( - self, - ) -> Callable[[firestore_admin.DeleteBackupRequest], empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._DeleteBackup(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_backup_schedule( - self, - ) -> Callable[[firestore_admin.DeleteBackupScheduleRequest], empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteBackupSchedule(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_database( - self, - ) -> Callable[[firestore_admin.DeleteDatabaseRequest], operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteDatabase(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_index( - self, - ) -> Callable[[firestore_admin.DeleteIndexRequest], empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteIndex(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_user_creds( - self, - ) -> Callable[[firestore_admin.DeleteUserCredsRequest], empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteUserCreds(self._session, self._host, self._interceptor) # type: ignore - - @property - def disable_user_creds( - self, - ) -> Callable[[firestore_admin.DisableUserCredsRequest], user_creds.UserCreds]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._DisableUserCreds(self._session, self._host, self._interceptor) # type: ignore - - @property - def enable_user_creds( - self, - ) -> Callable[[firestore_admin.EnableUserCredsRequest], user_creds.UserCreds]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._EnableUserCreds(self._session, self._host, self._interceptor) # type: ignore - - @property - def export_documents( - self, - ) -> Callable[[firestore_admin.ExportDocumentsRequest], operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ExportDocuments(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_backup(self) -> Callable[[firestore_admin.GetBackupRequest], backup.Backup]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetBackup(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_backup_schedule( - self, - ) -> Callable[[firestore_admin.GetBackupScheduleRequest], schedule.BackupSchedule]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetBackupSchedule(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_database( - self, - ) -> Callable[[firestore_admin.GetDatabaseRequest], database.Database]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._GetDatabase(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_field(self) -> Callable[[firestore_admin.GetFieldRequest], field.Field]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetField(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_index(self) -> Callable[[firestore_admin.GetIndexRequest], index.Index]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetIndex(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_user_creds( - self, - ) -> Callable[[firestore_admin.GetUserCredsRequest], user_creds.UserCreds]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetUserCreds(self._session, self._host, self._interceptor) # type: ignore - - @property - def import_documents( - self, - ) -> Callable[[firestore_admin.ImportDocumentsRequest], operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ImportDocuments(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_backups( - self, - ) -> Callable[ - [firestore_admin.ListBackupsRequest], firestore_admin.ListBackupsResponse - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ListBackups(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_backup_schedules( - self, - ) -> Callable[ - [firestore_admin.ListBackupSchedulesRequest], - firestore_admin.ListBackupSchedulesResponse, - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListBackupSchedules(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_databases( - self, - ) -> Callable[ - [firestore_admin.ListDatabasesRequest], firestore_admin.ListDatabasesResponse - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListDatabases(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_fields( - self, - ) -> Callable[ - [firestore_admin.ListFieldsRequest], firestore_admin.ListFieldsResponse - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListFields(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_indexes( - self, - ) -> Callable[ - [firestore_admin.ListIndexesRequest], firestore_admin.ListIndexesResponse - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListIndexes(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_user_creds( - self, - ) -> Callable[ - [firestore_admin.ListUserCredsRequest], firestore_admin.ListUserCredsResponse - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ListUserCreds(self._session, self._host, self._interceptor) # type: ignore - - @property - def reset_user_password( - self, - ) -> Callable[[firestore_admin.ResetUserPasswordRequest], user_creds.UserCreds]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ResetUserPassword(self._session, self._host, self._interceptor) # type: ignore - - @property - def restore_database( - self, - ) -> Callable[[firestore_admin.RestoreDatabaseRequest], operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._RestoreDatabase(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_backup_schedule( - self, - ) -> Callable[ - [firestore_admin.UpdateBackupScheduleRequest], schedule.BackupSchedule - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateBackupSchedule(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_database( - self, - ) -> Callable[[firestore_admin.UpdateDatabaseRequest], operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateDatabase(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_field( - self, - ) -> Callable[[firestore_admin.UpdateFieldRequest], operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._UpdateField(self._session, self._host, self._interceptor) # type: ignore - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation( - _BaseFirestoreAdminRestTransport._BaseCancelOperation, FirestoreAdminRestStub - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.CancelOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__( - self, - request: operations_pb2.CancelOperationRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Call the cancel operation method over HTTP. - - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseCancelOperation._get_http_options() - ) - - request, metadata = self._interceptor.pre_cancel_operation( - request, metadata - ) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseCancelOperation._get_transcoded_request( - http_options, request - ) - - body = _BaseFirestoreAdminRestTransport._BaseCancelOperation._get_request_body_json( - transcoded_request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseCancelOperation._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.CancelOperation", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "CancelOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._CancelOperation._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation( - _BaseFirestoreAdminRestTransport._BaseDeleteOperation, FirestoreAdminRestStub - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.DeleteOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__( - self, - request: operations_pb2.DeleteOperationRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseDeleteOperation._get_http_options() - ) - - request, metadata = self._interceptor.pre_delete_operation( - request, metadata - ) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseDeleteOperation._get_transcoded_request( - http_options, request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseDeleteOperation._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.DeleteOperation", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "DeleteOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._DeleteOperation._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation( - _BaseFirestoreAdminRestTransport._BaseGetOperation, FirestoreAdminRestStub - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.GetOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__( - self, - request: operations_pb2.GetOperationRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.Operation: Response from GetOperation method. 
- """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseGetOperation._get_http_options() - ) - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseGetOperation._get_transcoded_request( - http_options, request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseGetOperation._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.GetOperation", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "GetOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._GetOperation._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.Operation() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore.admin_v1.FirestoreAdminAsyncClient.GetOperation", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "GetOperation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations( - _BaseFirestoreAdminRestTransport._BaseListOperations, FirestoreAdminRestStub - ): - def __hash__(self): - return hash("FirestoreAdminRestTransport.ListOperations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__( - self, - request: operations_pb2.ListOperationsRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Call the 
list operations method over HTTP. - - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. - """ - - http_options = ( - _BaseFirestoreAdminRestTransport._BaseListOperations._get_http_options() - ) - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseFirestoreAdminRestTransport._BaseListOperations._get_transcoded_request( - http_options, request - ) - - # Jsonify the query params - query_params = _BaseFirestoreAdminRestTransport._BaseListOperations._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore.admin_v1.FirestoreAdminClient.ListOperations", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "ListOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreAdminRestTransport._ListOperations._get_response( - self._host, - metadata, - 
query_params, - self._session, - timeout, - transcoded_request, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore.admin_v1.FirestoreAdminAsyncClient.ListOperations", - extra={ - "serviceName": "google.firestore.admin.v1.FirestoreAdmin", - "rpcName": "ListOperations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__ = ("FirestoreAdminRestTransport",) diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest_base.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest_base.py deleted file mode 100644 index 56b6ce93f8..0000000000 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest_base.py +++ /dev/null @@ -1,1867 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.cloud.location import locations_pb2 # type: ignore -from .base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.firestore_admin_v1.types import backup -from google.cloud.firestore_admin_v1.types import database -from google.cloud.firestore_admin_v1.types import field -from google.cloud.firestore_admin_v1.types import firestore_admin -from google.cloud.firestore_admin_v1.types import index -from google.cloud.firestore_admin_v1.types import schedule -from google.cloud.firestore_admin_v1.types import user_creds -from google.cloud.firestore_admin_v1.types import user_creds as gfa_user_creds -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - - -class _BaseFirestoreAdminRestTransport(FirestoreAdminTransport): - """Base REST backend transport for FirestoreAdmin. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__( - self, - *, - host: str = "firestore.googleapis.com", - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = "https", - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'firestore.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. 
- """ - # Run the base constructor - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError( - f"Unexpected hostname structure: {host}" - ) # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - class _BaseBulkDeleteDocuments: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{name=projects/*/databases/*}:bulkDeleteDocuments", - "body": "*", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore_admin.BulkDeleteDocumentsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) - return body - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseBulkDeleteDocuments._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class 
_BaseCloneDatabase: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{parent=projects/*}/databases:clone", - "body": "*", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore_admin.CloneDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) - return body - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseCloneDatabase._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateBackupSchedule: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{parent=projects/*/databases/*}/backupSchedules", - "body": "backup_schedule", 
- }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore_admin.CreateBackupScheduleRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) - return body - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseCreateBackupSchedule._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateDatabase: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "databaseId": "", - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{parent=projects/*}/databases", - "body": "database", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore_admin.CreateDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) - return body - - @staticmethod - def 
_get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseCreateDatabase._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateIndex: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes", - "body": "index", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore_admin.CreateIndexRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) - return body - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseCreateIndex._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateUserCreds: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - 
__REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "userCredsId": "", - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{parent=projects/*/databases/*}/userCreds", - "body": "user_creds", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore_admin.CreateUserCredsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) - return body - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseCreateUserCreds._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteBackup: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/backups/*}", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = 
firestore_admin.DeleteBackupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseDeleteBackup._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteBackupSchedule: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "delete", - "uri": "/v1/{name=projects/*/databases/*/backupSchedules/*}", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore_admin.DeleteBackupScheduleRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseDeleteBackupSchedule._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteDatabase: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - 
@classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "delete", - "uri": "/v1/{name=projects/*/databases/*}", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore_admin.DeleteDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseDeleteDatabase._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteIndex: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "delete", - "uri": "/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore_admin.DeleteIndexRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - 
use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseDeleteIndex._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteUserCreds: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "delete", - "uri": "/v1/{name=projects/*/databases/*/userCreds/*}", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore_admin.DeleteUserCredsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseDeleteUserCreds._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDisableUserCreds: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": 
"/v1/{name=projects/*/databases/*/userCreds/*}:disable", - "body": "*", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore_admin.DisableUserCredsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) - return body - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseDisableUserCreds._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseEnableUserCreds: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{name=projects/*/databases/*/userCreds/*}:enable", - "body": "*", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore_admin.EnableUserCredsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) - return 
body - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseEnableUserCreds._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseExportDocuments: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{name=projects/*/databases/*}:exportDocuments", - "body": "*", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore_admin.ExportDocumentsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) - return body - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseExportDocuments._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetBackup: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - 
__REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*/backups/*}", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore_admin.GetBackupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseGetBackup._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetBackupSchedule: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=projects/*/databases/*/backupSchedules/*}", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore_admin.GetBackupScheduleRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - 
json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseGetBackupSchedule._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetDatabase: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=projects/*/databases/*}", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore_admin.GetDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseGetDatabase._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetField: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": 
"/v1/{name=projects/*/databases/*/collectionGroups/*/fields/*}", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore_admin.GetFieldRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseGetField._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetIndex: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore_admin.GetIndexRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseGetIndex._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetUserCreds: - def __hash__(self): # pragma: 
NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=projects/*/databases/*/userCreds/*}", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore_admin.GetUserCredsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseGetUserCreds._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseImportDocuments: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{name=projects/*/databases/*}:importDocuments", - "body": "*", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore_admin.ImportDocumentsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def 
_get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) - return body - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseImportDocuments._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListBackups: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{parent=projects/*/locations/*}/backups", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore_admin.ListBackupsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseListBackups._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListBackupSchedules: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - 
@classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{parent=projects/*/databases/*}/backupSchedules", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore_admin.ListBackupSchedulesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseListBackupSchedules._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListDatabases: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{parent=projects/*}/databases", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore_admin.ListDatabasesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - 
use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseListDatabases._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListFields: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{parent=projects/*/databases/*/collectionGroups/*}/fields", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore_admin.ListFieldsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseListFields._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListIndexes: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": 
"/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore_admin.ListIndexesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseListIndexes._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListUserCreds: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{parent=projects/*/databases/*}/userCreds", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore_admin.ListUserCredsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseListUserCreds._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseResetUserPassword: - def __hash__(self): 
# pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{name=projects/*/databases/*/userCreds/*}:resetPassword", - "body": "*", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore_admin.ResetUserPasswordRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) - return body - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseResetUserPassword._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseRestoreDatabase: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{parent=projects/*}/databases:restore", - "body": "*", - }, - ] - return http_options - - 
@staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore_admin.RestoreDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) - return body - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseRestoreDatabase._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateBackupSchedule: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "patch", - "uri": "/v1/{backup_schedule.name=projects/*/databases/*/backupSchedules/*}", - "body": "backup_schedule", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore_admin.UpdateBackupScheduleRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) - return body - - @staticmethod - def 
_get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseUpdateBackupSchedule._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateDatabase: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "patch", - "uri": "/v1/{database.name=projects/*/databases/*}", - "body": "database", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore_admin.UpdateDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) - return body - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseUpdateDatabase._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateField: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - 
__REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "patch", - "uri": "/v1/{field.name=projects/*/databases/*/collectionGroups/*/fields/*}", - "body": "field", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore_admin.UpdateFieldRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) - return body - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreAdminRestTransport._BaseUpdateField._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCancelOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{name=projects/*/databases/*/operations/*}:cancel", - "body": "*", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - body = 
json.dumps(transcoded_request["body"]) - return body - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - return query_params - - class _BaseDeleteOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "delete", - "uri": "/v1/{name=projects/*/databases/*/operations/*}", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - return query_params - - class _BaseGetOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=projects/*/databases/*/operations/*}", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - return query_params - - class _BaseListOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=projects/*/databases/*}/operations", - }, - ] - 
return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - return query_params - - -__all__ = ("_BaseFirestoreAdminRestTransport",) diff --git a/google/cloud/firestore_admin_v1/types/__init__.py b/google/cloud/firestore_admin_v1/types/__init__.py deleted file mode 100644 index c76372e5d5..0000000000 --- a/google/cloud/firestore_admin_v1/types/__init__.py +++ /dev/null @@ -1,162 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .backup import ( - Backup, -) -from .database import ( - Database, -) -from .field import ( - Field, -) -from .firestore_admin import ( - BulkDeleteDocumentsRequest, - BulkDeleteDocumentsResponse, - CloneDatabaseRequest, - CreateBackupScheduleRequest, - CreateDatabaseMetadata, - CreateDatabaseRequest, - CreateIndexRequest, - CreateUserCredsRequest, - DeleteBackupRequest, - DeleteBackupScheduleRequest, - DeleteDatabaseMetadata, - DeleteDatabaseRequest, - DeleteIndexRequest, - DeleteUserCredsRequest, - DisableUserCredsRequest, - EnableUserCredsRequest, - ExportDocumentsRequest, - GetBackupRequest, - GetBackupScheduleRequest, - GetDatabaseRequest, - GetFieldRequest, - GetIndexRequest, - GetUserCredsRequest, - ImportDocumentsRequest, - ListBackupSchedulesRequest, - ListBackupSchedulesResponse, - ListBackupsRequest, - ListBackupsResponse, - ListDatabasesRequest, - ListDatabasesResponse, - ListFieldsRequest, - ListFieldsResponse, - ListIndexesRequest, - ListIndexesResponse, - ListUserCredsRequest, - ListUserCredsResponse, - ResetUserPasswordRequest, - RestoreDatabaseRequest, - UpdateBackupScheduleRequest, - UpdateDatabaseMetadata, - UpdateDatabaseRequest, - UpdateFieldRequest, -) -from .index import ( - Index, -) -from .location import ( - LocationMetadata, -) -from .operation import ( - BulkDeleteDocumentsMetadata, - CloneDatabaseMetadata, - ExportDocumentsMetadata, - ExportDocumentsResponse, - FieldOperationMetadata, - ImportDocumentsMetadata, - IndexOperationMetadata, - Progress, - RestoreDatabaseMetadata, - OperationState, -) -from .schedule import ( - BackupSchedule, - DailyRecurrence, - WeeklyRecurrence, -) -from .snapshot import ( - PitrSnapshot, -) -from .user_creds import ( - UserCreds, -) - -__all__ = ( - "Backup", - "Database", - "Field", - "BulkDeleteDocumentsRequest", - "BulkDeleteDocumentsResponse", - "CloneDatabaseRequest", - "CreateBackupScheduleRequest", - "CreateDatabaseMetadata", - "CreateDatabaseRequest", - "CreateIndexRequest", - 
"CreateUserCredsRequest", - "DeleteBackupRequest", - "DeleteBackupScheduleRequest", - "DeleteDatabaseMetadata", - "DeleteDatabaseRequest", - "DeleteIndexRequest", - "DeleteUserCredsRequest", - "DisableUserCredsRequest", - "EnableUserCredsRequest", - "ExportDocumentsRequest", - "GetBackupRequest", - "GetBackupScheduleRequest", - "GetDatabaseRequest", - "GetFieldRequest", - "GetIndexRequest", - "GetUserCredsRequest", - "ImportDocumentsRequest", - "ListBackupSchedulesRequest", - "ListBackupSchedulesResponse", - "ListBackupsRequest", - "ListBackupsResponse", - "ListDatabasesRequest", - "ListDatabasesResponse", - "ListFieldsRequest", - "ListFieldsResponse", - "ListIndexesRequest", - "ListIndexesResponse", - "ListUserCredsRequest", - "ListUserCredsResponse", - "ResetUserPasswordRequest", - "RestoreDatabaseRequest", - "UpdateBackupScheduleRequest", - "UpdateDatabaseMetadata", - "UpdateDatabaseRequest", - "UpdateFieldRequest", - "Index", - "LocationMetadata", - "BulkDeleteDocumentsMetadata", - "CloneDatabaseMetadata", - "ExportDocumentsMetadata", - "ExportDocumentsResponse", - "FieldOperationMetadata", - "ImportDocumentsMetadata", - "IndexOperationMetadata", - "Progress", - "RestoreDatabaseMetadata", - "OperationState", - "BackupSchedule", - "DailyRecurrence", - "WeeklyRecurrence", - "PitrSnapshot", - "UserCreds", -) diff --git a/google/cloud/firestore_admin_v1/types/backup.py b/google/cloud/firestore_admin_v1/types/backup.py deleted file mode 100644 index 02c594a223..0000000000 --- a/google/cloud/firestore_admin_v1/types/backup.py +++ /dev/null @@ -1,153 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package="google.firestore.admin.v1", - manifest={ - "Backup", - }, -) - - -class Backup(proto.Message): - r"""A Backup of a Cloud Firestore Database. - - The backup contains all documents and index configurations for - the given database at a specific point in time. - - Attributes: - name (str): - Output only. The unique resource name of the Backup. - - Format is - ``projects/{project}/locations/{location}/backups/{backup}``. - database (str): - Output only. Name of the Firestore database that the backup - is from. - - Format is ``projects/{project}/databases/{database}``. - database_uid (str): - Output only. The system-generated UUID4 for - the Firestore database that the backup is from. - snapshot_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The backup contains an - externally consistent copy of the database at - this time. - expire_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The timestamp at which this - backup expires. - stats (google.cloud.firestore_admin_v1.types.Backup.Stats): - Output only. Statistics about the backup. - - This data only becomes available after the - backup is fully materialized to secondary - storage. This field will be empty till then. - state (google.cloud.firestore_admin_v1.types.Backup.State): - Output only. The current state of the backup. 
- """ - - class State(proto.Enum): - r"""Indicate the current state of the backup. - - Values: - STATE_UNSPECIFIED (0): - The state is unspecified. - CREATING (1): - The pending backup is still being created. - Operations on the backup will be rejected in - this state. - READY (2): - The backup is complete and ready to use. - NOT_AVAILABLE (3): - The backup is not available at this moment. - """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - READY = 2 - NOT_AVAILABLE = 3 - - class Stats(proto.Message): - r"""Backup specific statistics. - - Attributes: - size_bytes (int): - Output only. Summation of the size of all - documents and index entries in the backup, - measured in bytes. - document_count (int): - Output only. The total number of documents - contained in the backup. - index_count (int): - Output only. The total number of index - entries contained in the backup. - """ - - size_bytes: int = proto.Field( - proto.INT64, - number=1, - ) - document_count: int = proto.Field( - proto.INT64, - number=2, - ) - index_count: int = proto.Field( - proto.INT64, - number=3, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - database: str = proto.Field( - proto.STRING, - number=2, - ) - database_uid: str = proto.Field( - proto.STRING, - number=7, - ) - snapshot_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - expire_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - stats: Stats = proto.Field( - proto.MESSAGE, - number=6, - message=Stats, - ) - state: State = proto.Field( - proto.ENUM, - number=8, - enum=State, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_admin_v1/types/database.py b/google/cloud/firestore_admin_v1/types/database.py deleted file mode 100644 index f46bede62b..0000000000 --- a/google/cloud/firestore_admin_v1/types/database.py +++ /dev/null @@ -1,555 +0,0 @@ -# -*- coding: 
utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package="google.firestore.admin.v1", - manifest={ - "Database", - }, -) - - -class Database(proto.Message): - r"""A Cloud Firestore Database. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - The resource name of the Database. Format: - ``projects/{project}/databases/{database}`` - uid (str): - Output only. The system-generated UUID4 for - this Database. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The timestamp at which this database was - created. Databases created before 2016 do not populate - create_time. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The timestamp at which this - database was most recently updated. Note this - only includes updates to the database resource - and not data contained by the database. - delete_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The timestamp at which this - database was deleted. Only set if the database - has been deleted. - location_id (str): - The location of the database. 
Available - locations are listed at - https://cloud.google.com/firestore/docs/locations. - type_ (google.cloud.firestore_admin_v1.types.Database.DatabaseType): - The type of the database. - See - https://cloud.google.com/datastore/docs/firestore-or-datastore - for information about how to choose. - concurrency_mode (google.cloud.firestore_admin_v1.types.Database.ConcurrencyMode): - The concurrency control mode to use for this - database. - version_retention_period (google.protobuf.duration_pb2.Duration): - Output only. The period during which past versions of data - are retained in the database. - - Any [read][google.firestore.v1.GetDocumentRequest.read_time] - or - [query][google.firestore.v1.ListDocumentsRequest.read_time] - can specify a ``read_time`` within this window, and will - read the state of the database at that time. - - If the PITR feature is enabled, the retention period is 7 - days. Otherwise, the retention period is 1 hour. - earliest_version_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The earliest timestamp at which older versions - of the data can be read from the database. See - [version_retention_period] above; this field is populated - with ``now - version_retention_period``. - - This value is continuously updated, and becomes stale the - moment it is queried. If you are using this value to recover - data, make sure to account for the time from the moment when - the value is queried to the moment when you initiate the - recovery. - point_in_time_recovery_enablement (google.cloud.firestore_admin_v1.types.Database.PointInTimeRecoveryEnablement): - Whether to enable the PITR feature on this - database. - app_engine_integration_mode (google.cloud.firestore_admin_v1.types.Database.AppEngineIntegrationMode): - The App Engine integration mode to use for - this database. - key_prefix (str): - Output only. The key_prefix for this database. 
This - key_prefix is used, in combination with the project ID ("~") - to construct the application ID that is returned from the - Cloud Datastore APIs in Google App Engine first generation - runtimes. - - This value may be empty in which case the appid to use for - URL-encoded keys is the project_id (eg: foo instead of - v~foo). - delete_protection_state (google.cloud.firestore_admin_v1.types.Database.DeleteProtectionState): - State of delete protection for the database. - cmek_config (google.cloud.firestore_admin_v1.types.Database.CmekConfig): - Optional. Presence indicates CMEK is enabled - for this database. - previous_id (str): - Output only. The database resource's prior - database ID. This field is only populated for - deleted databases. - source_info (google.cloud.firestore_admin_v1.types.Database.SourceInfo): - Output only. Information about the provenance - of this database. - tags (MutableMapping[str, str]): - Optional. Input only. Immutable. Tag - keys/values directly bound to this resource. For - example: - - "123/environment": "production", - "123/costCenter": "marketing". - free_tier (bool): - Output only. Background: Free tier is the - ability of a Firestore database to use a small - amount of resources every day without being - charged. Once usage exceeds the free tier limit - further usage is charged. - - Whether this database can make use of the free - tier. Only one database per project can be - eligible for the free tier. - - The first (or next) database that is created in - a project without a free tier database will be - marked as eligible for the free tier. Databases - that are created while there is a free tier - database will not be eligible for the free tier. - - This field is a member of `oneof`_ ``_free_tier``. - etag (str): - This checksum is computed by the server based - on the value of other fields, and may be sent on - update and delete requests to ensure the client - has an up-to-date value before proceeding. 
- database_edition (google.cloud.firestore_admin_v1.types.Database.DatabaseEdition): - Immutable. The edition of the database. - """ - - class DatabaseType(proto.Enum): - r"""The type of the database. - See - https://cloud.google.com/datastore/docs/firestore-or-datastore - for information about how to choose. - - Mode changes are only allowed if the database is empty. - - Values: - DATABASE_TYPE_UNSPECIFIED (0): - Not used. - FIRESTORE_NATIVE (1): - Firestore Native Mode - DATASTORE_MODE (2): - Firestore in Datastore Mode. - """ - DATABASE_TYPE_UNSPECIFIED = 0 - FIRESTORE_NATIVE = 1 - DATASTORE_MODE = 2 - - class ConcurrencyMode(proto.Enum): - r"""The type of concurrency control mode for transactions. - - Values: - CONCURRENCY_MODE_UNSPECIFIED (0): - Not used. - OPTIMISTIC (1): - Use optimistic concurrency control by - default. This mode is available for Cloud - Firestore databases. - PESSIMISTIC (2): - Use pessimistic concurrency control by - default. This mode is available for Cloud - Firestore databases. - - This is the default setting for Cloud Firestore. - OPTIMISTIC_WITH_ENTITY_GROUPS (3): - Use optimistic concurrency control with - entity groups by default. - This is the only available mode for Cloud - Datastore. - - This mode is also available for Cloud Firestore - with Datastore Mode but is not recommended. - """ - CONCURRENCY_MODE_UNSPECIFIED = 0 - OPTIMISTIC = 1 - PESSIMISTIC = 2 - OPTIMISTIC_WITH_ENTITY_GROUPS = 3 - - class PointInTimeRecoveryEnablement(proto.Enum): - r"""Point In Time Recovery feature enablement. - - Values: - POINT_IN_TIME_RECOVERY_ENABLEMENT_UNSPECIFIED (0): - Not used. - POINT_IN_TIME_RECOVERY_ENABLED (1): - Reads are supported on selected versions of the data from - within the past 7 days: - - - Reads against any timestamp within the past hour - - Reads against 1-minute snapshots beyond 1 hour and within - 7 days - - ``version_retention_period`` and ``earliest_version_time`` - can be used to determine the supported versions. 
- POINT_IN_TIME_RECOVERY_DISABLED (2): - Reads are supported on any version of the - data from within the past 1 hour. - """ - POINT_IN_TIME_RECOVERY_ENABLEMENT_UNSPECIFIED = 0 - POINT_IN_TIME_RECOVERY_ENABLED = 1 - POINT_IN_TIME_RECOVERY_DISABLED = 2 - - class AppEngineIntegrationMode(proto.Enum): - r"""The type of App Engine integration mode. - - Values: - APP_ENGINE_INTEGRATION_MODE_UNSPECIFIED (0): - Not used. - ENABLED (1): - If an App Engine application exists in the - same region as this database, App Engine - configuration will impact this database. This - includes disabling of the application & - database, as well as disabling writes to the - database. - DISABLED (2): - App Engine has no effect on the ability of - this database to serve requests. - - This is the default setting for databases - created with the Firestore API. - """ - APP_ENGINE_INTEGRATION_MODE_UNSPECIFIED = 0 - ENABLED = 1 - DISABLED = 2 - - class DeleteProtectionState(proto.Enum): - r"""The delete protection state of the database. - - Values: - DELETE_PROTECTION_STATE_UNSPECIFIED (0): - The default value. Delete protection type is - not specified - DELETE_PROTECTION_DISABLED (1): - Delete protection is disabled - DELETE_PROTECTION_ENABLED (2): - Delete protection is enabled - """ - DELETE_PROTECTION_STATE_UNSPECIFIED = 0 - DELETE_PROTECTION_DISABLED = 1 - DELETE_PROTECTION_ENABLED = 2 - - class DatabaseEdition(proto.Enum): - r"""The edition of the database. - - Values: - DATABASE_EDITION_UNSPECIFIED (0): - Not used. - STANDARD (1): - Standard edition. - - This is the default setting if not specified. - ENTERPRISE (2): - Enterprise edition. - """ - DATABASE_EDITION_UNSPECIFIED = 0 - STANDARD = 1 - ENTERPRISE = 2 - - class CmekConfig(proto.Message): - r"""The CMEK (Customer Managed Encryption Key) configuration for - a Firestore database. If not present, the database is secured by - the default Google encryption key. - - Attributes: - kms_key_name (str): - Required. 
Only keys in the same location as this database - are allowed to be used for encryption. - - For Firestore's nam5 multi-region, this corresponds to Cloud - KMS multi-region us. For Firestore's eur3 multi-region, this - corresponds to Cloud KMS multi-region europe. See - https://cloud.google.com/kms/docs/locations. - - The expected format is - ``projects/{project_id}/locations/{kms_location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}``. - active_key_version (MutableSequence[str]): - Output only. Currently in-use `KMS key - versions `__. - During `key - rotation `__, - there can be multiple in-use key versions. - - The expected format is - ``projects/{project_id}/locations/{kms_location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{key_version}``. - """ - - kms_key_name: str = proto.Field( - proto.STRING, - number=1, - ) - active_key_version: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - - class SourceInfo(proto.Message): - r"""Information about the provenance of this database. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - backup (google.cloud.firestore_admin_v1.types.Database.SourceInfo.BackupSource): - If set, this database was restored from the - specified backup (or a snapshot thereof). - - This field is a member of `oneof`_ ``source``. - operation (str): - The associated long-running operation. This field may not be - set after the operation has completed. Format: - ``projects/{project}/databases/{database}/operations/{operation}``. - """ - - class BackupSource(proto.Message): - r"""Information about a backup that was used to restore a - database. - - Attributes: - backup (str): - The resource name of the backup that was used to restore - this database. Format: - ``projects/{project}/locations/{location}/backups/{backup}``. 
- """ - - backup: str = proto.Field( - proto.STRING, - number=1, - ) - - backup: "Database.SourceInfo.BackupSource" = proto.Field( - proto.MESSAGE, - number=1, - oneof="source", - message="Database.SourceInfo.BackupSource", - ) - operation: str = proto.Field( - proto.STRING, - number=3, - ) - - class EncryptionConfig(proto.Message): - r"""Encryption configuration for a new database being created from - another source. - - The source could be a [Backup][google.firestore.admin.v1.Backup] . - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - google_default_encryption (google.cloud.firestore_admin_v1.types.Database.EncryptionConfig.GoogleDefaultEncryptionOptions): - Use Google default encryption. - - This field is a member of `oneof`_ ``encryption_type``. - use_source_encryption (google.cloud.firestore_admin_v1.types.Database.EncryptionConfig.SourceEncryptionOptions): - The database will use the same encryption - configuration as the source. - - This field is a member of `oneof`_ ``encryption_type``. - customer_managed_encryption (google.cloud.firestore_admin_v1.types.Database.EncryptionConfig.CustomerManagedEncryptionOptions): - Use Customer Managed Encryption Keys (CMEK) - for encryption. - - This field is a member of `oneof`_ ``encryption_type``. - """ - - class GoogleDefaultEncryptionOptions(proto.Message): - r"""The configuration options for using Google default - encryption. - - """ - - class SourceEncryptionOptions(proto.Message): - r"""The configuration options for using the same encryption - method as the source. - - """ - - class CustomerManagedEncryptionOptions(proto.Message): - r"""The configuration options for using CMEK (Customer Managed - Encryption Key) encryption. 
- - Attributes: - kms_key_name (str): - Required. Only keys in the same location as the database are - allowed to be used for encryption. - - For Firestore's nam5 multi-region, this corresponds to Cloud - KMS multi-region us. For Firestore's eur3 multi-region, this - corresponds to Cloud KMS multi-region europe. See - https://cloud.google.com/kms/docs/locations. - - The expected format is - ``projects/{project_id}/locations/{kms_location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}``. - """ - - kms_key_name: str = proto.Field( - proto.STRING, - number=1, - ) - - google_default_encryption: "Database.EncryptionConfig.GoogleDefaultEncryptionOptions" = proto.Field( - proto.MESSAGE, - number=1, - oneof="encryption_type", - message="Database.EncryptionConfig.GoogleDefaultEncryptionOptions", - ) - use_source_encryption: "Database.EncryptionConfig.SourceEncryptionOptions" = ( - proto.Field( - proto.MESSAGE, - number=2, - oneof="encryption_type", - message="Database.EncryptionConfig.SourceEncryptionOptions", - ) - ) - customer_managed_encryption: "Database.EncryptionConfig.CustomerManagedEncryptionOptions" = proto.Field( - proto.MESSAGE, - number=3, - oneof="encryption_type", - message="Database.EncryptionConfig.CustomerManagedEncryptionOptions", - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - uid: str = proto.Field( - proto.STRING, - number=3, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - delete_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - location_id: str = proto.Field( - proto.STRING, - number=9, - ) - type_: DatabaseType = proto.Field( - proto.ENUM, - number=10, - enum=DatabaseType, - ) - concurrency_mode: ConcurrencyMode = proto.Field( - proto.ENUM, - number=15, - 
enum=ConcurrencyMode, - ) - version_retention_period: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=17, - message=duration_pb2.Duration, - ) - earliest_version_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=18, - message=timestamp_pb2.Timestamp, - ) - point_in_time_recovery_enablement: PointInTimeRecoveryEnablement = proto.Field( - proto.ENUM, - number=21, - enum=PointInTimeRecoveryEnablement, - ) - app_engine_integration_mode: AppEngineIntegrationMode = proto.Field( - proto.ENUM, - number=19, - enum=AppEngineIntegrationMode, - ) - key_prefix: str = proto.Field( - proto.STRING, - number=20, - ) - delete_protection_state: DeleteProtectionState = proto.Field( - proto.ENUM, - number=22, - enum=DeleteProtectionState, - ) - cmek_config: CmekConfig = proto.Field( - proto.MESSAGE, - number=23, - message=CmekConfig, - ) - previous_id: str = proto.Field( - proto.STRING, - number=25, - ) - source_info: SourceInfo = proto.Field( - proto.MESSAGE, - number=26, - message=SourceInfo, - ) - tags: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=29, - ) - free_tier: bool = proto.Field( - proto.BOOL, - number=30, - optional=True, - ) - etag: str = proto.Field( - proto.STRING, - number=99, - ) - database_edition: DatabaseEdition = proto.Field( - proto.ENUM, - number=28, - enum=DatabaseEdition, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_admin_v1/types/field.py b/google/cloud/firestore_admin_v1/types/field.py deleted file mode 100644 index 824a9c87f5..0000000000 --- a/google/cloud/firestore_admin_v1/types/field.py +++ /dev/null @@ -1,189 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.firestore_admin_v1.types import index - - -__protobuf__ = proto.module( - package="google.firestore.admin.v1", - manifest={ - "Field", - }, -) - - -class Field(proto.Message): - r"""Represents a single field in the database. - - Fields are grouped by their "Collection Group", which represent - all collections in the database with the same ID. - - Attributes: - name (str): - Required. A field name of the form: - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_path}`` - - A field path can be a simple field name, e.g. ``address`` or - a path to fields within ``map_value`` , e.g. - ``address.city``, or a special field path. The only valid - special field is ``*``, which represents any field. - - Field paths can be quoted using :literal:`\`` (backtick). - The only character that must be escaped within a quoted - field path is the backtick character itself, escaped using a - backslash. Special characters in field paths that must be - quoted include: ``*``, ``.``, :literal:`\`` (backtick), - ``[``, ``]``, as well as any ascii symbolic characters. - - Examples: :literal:`\`address.city\`` represents a field - named ``address.city``, not the map key ``city`` in the - field ``address``. :literal:`\`*\`` represents a field named - ``*``, not any field. - - A special ``Field`` contains the default indexing settings - for all fields. 
This field's resource name is: - ``projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*`` - Indexes defined on this ``Field`` will be applied to all - fields which do not have their own ``Field`` index - configuration. - index_config (google.cloud.firestore_admin_v1.types.Field.IndexConfig): - The index configuration for this field. If unset, field - indexing will revert to the configuration defined by the - ``ancestor_field``. To explicitly remove all indexes for - this field, specify an index config with an empty list of - indexes. - ttl_config (google.cloud.firestore_admin_v1.types.Field.TtlConfig): - The TTL configuration for this ``Field``. Setting or - unsetting this will enable or disable the TTL for documents - that have this ``Field``. - """ - - class IndexConfig(proto.Message): - r"""The index configuration for this field. - - Attributes: - indexes (MutableSequence[google.cloud.firestore_admin_v1.types.Index]): - The indexes supported for this field. - uses_ancestor_config (bool): - Output only. When true, the ``Field``'s index configuration - is set from the configuration specified by the - ``ancestor_field``. When false, the ``Field``'s index - configuration is defined explicitly. - ancestor_field (str): - Output only. Specifies the resource name of the ``Field`` - from which this field's index configuration is set (when - ``uses_ancestor_config`` is true), or from which it *would* - be set if this field had no index configuration (when - ``uses_ancestor_config`` is false). - reverting (bool): - Output only When true, the ``Field``'s index configuration - is in the process of being reverted. Once complete, the - index config will transition to the same state as the field - specified by ``ancestor_field``, at which point - ``uses_ancestor_config`` will be ``true`` and ``reverting`` - will be ``false``. 
- """ - - indexes: MutableSequence[index.Index] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=index.Index, - ) - uses_ancestor_config: bool = proto.Field( - proto.BOOL, - number=2, - ) - ancestor_field: str = proto.Field( - proto.STRING, - number=3, - ) - reverting: bool = proto.Field( - proto.BOOL, - number=4, - ) - - class TtlConfig(proto.Message): - r"""The TTL (time-to-live) configuration for documents that have this - ``Field`` set. - - Storing a timestamp value into a TTL-enabled field will be treated - as the document's absolute expiration time. Timestamp values in the - past indicate that the document is eligible for immediate - expiration. Using any other data type or leaving the field absent - will disable expiration for the individual document. - - Attributes: - state (google.cloud.firestore_admin_v1.types.Field.TtlConfig.State): - Output only. The state of the TTL - configuration. - """ - - class State(proto.Enum): - r"""The state of applying the TTL configuration to all documents. - - Values: - STATE_UNSPECIFIED (0): - The state is unspecified or unknown. - CREATING (1): - The TTL is being applied. There is an active - long-running operation to track the change. - Newly written documents will have TTLs applied - as requested. Requested TTLs on existing - documents are still being processed. When TTLs - on all existing documents have been processed, - the state will move to 'ACTIVE'. - ACTIVE (2): - The TTL is active for all documents. - NEEDS_REPAIR (3): - The TTL configuration could not be enabled for all existing - documents. Newly written documents will continue to have - their TTL applied. The LRO returned when last attempting to - enable TTL for this ``Field`` has failed, and may have more - details. 
- """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - ACTIVE = 2 - NEEDS_REPAIR = 3 - - state: "Field.TtlConfig.State" = proto.Field( - proto.ENUM, - number=1, - enum="Field.TtlConfig.State", - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - index_config: IndexConfig = proto.Field( - proto.MESSAGE, - number=2, - message=IndexConfig, - ) - ttl_config: TtlConfig = proto.Field( - proto.MESSAGE, - number=3, - message=TtlConfig, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_admin_v1/types/firestore_admin.py b/google/cloud/firestore_admin_v1/types/firestore_admin.py deleted file mode 100644 index 9ede35cacf..0000000000 --- a/google/cloud/firestore_admin_v1/types/firestore_admin.py +++ /dev/null @@ -1,1150 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.firestore_admin_v1.types import backup as gfa_backup -from google.cloud.firestore_admin_v1.types import database as gfa_database -from google.cloud.firestore_admin_v1.types import field as gfa_field -from google.cloud.firestore_admin_v1.types import index as gfa_index -from google.cloud.firestore_admin_v1.types import schedule -from google.cloud.firestore_admin_v1.types import snapshot -from google.cloud.firestore_admin_v1.types import user_creds as gfa_user_creds -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package="google.firestore.admin.v1", - manifest={ - "ListDatabasesRequest", - "CreateDatabaseRequest", - "CreateDatabaseMetadata", - "ListDatabasesResponse", - "GetDatabaseRequest", - "UpdateDatabaseRequest", - "UpdateDatabaseMetadata", - "DeleteDatabaseRequest", - "DeleteDatabaseMetadata", - "CreateUserCredsRequest", - "GetUserCredsRequest", - "ListUserCredsRequest", - "ListUserCredsResponse", - "EnableUserCredsRequest", - "DisableUserCredsRequest", - "ResetUserPasswordRequest", - "DeleteUserCredsRequest", - "CreateBackupScheduleRequest", - "GetBackupScheduleRequest", - "UpdateBackupScheduleRequest", - "ListBackupSchedulesRequest", - "ListBackupSchedulesResponse", - "DeleteBackupScheduleRequest", - "CreateIndexRequest", - "ListIndexesRequest", - "ListIndexesResponse", - "GetIndexRequest", - "DeleteIndexRequest", - "UpdateFieldRequest", - "GetFieldRequest", - "ListFieldsRequest", - "ListFieldsResponse", - "ExportDocumentsRequest", - "ImportDocumentsRequest", - "BulkDeleteDocumentsRequest", - "BulkDeleteDocumentsResponse", - "GetBackupRequest", - "ListBackupsRequest", - "ListBackupsResponse", - "DeleteBackupRequest", - "RestoreDatabaseRequest", - "CloneDatabaseRequest", - }, -) - - -class 
ListDatabasesRequest(proto.Message): - r"""A request to list the Firestore Databases in all locations - for a project. - - Attributes: - parent (str): - Required. A parent name of the form - ``projects/{project_id}`` - show_deleted (bool): - If true, also returns deleted resources. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - show_deleted: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class CreateDatabaseRequest(proto.Message): - r"""The request for - [FirestoreAdmin.CreateDatabase][google.firestore.admin.v1.FirestoreAdmin.CreateDatabase]. - - Attributes: - parent (str): - Required. A parent name of the form - ``projects/{project_id}`` - database (google.cloud.firestore_admin_v1.types.Database): - Required. The Database to create. - database_id (str): - Required. The ID to use for the database, which will become - the final component of the database's resource name. - - This value should be 4-63 characters. Valid characters are - /[a-z][0-9]-/ with first character a letter and the last a - letter or a number. Must not be UUID-like - /[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}/. - - "(default)" database ID is also valid. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - database: gfa_database.Database = proto.Field( - proto.MESSAGE, - number=2, - message=gfa_database.Database, - ) - database_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class CreateDatabaseMetadata(proto.Message): - r"""Metadata related to the create database operation.""" - - -class ListDatabasesResponse(proto.Message): - r"""The list of databases for a project. - - Attributes: - databases (MutableSequence[google.cloud.firestore_admin_v1.types.Database]): - The databases in the project. - unreachable (MutableSequence[str]): - In the event that data about individual databases cannot be - listed they will be recorded here. 
- - An example entry might be: - projects/some_project/locations/some_location This can - happen if the Cloud Region that the Database resides in is - currently unavailable. In this case we can't fetch all the - details about the database. You may be able to get a more - detailed error message (or possibly fetch the resource) by - sending a 'Get' request for the resource or a 'List' request - for the specific location. - """ - - databases: MutableSequence[gfa_database.Database] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gfa_database.Database, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class GetDatabaseRequest(proto.Message): - r"""The request for - [FirestoreAdmin.GetDatabase][google.firestore.admin.v1.FirestoreAdmin.GetDatabase]. - - Attributes: - name (str): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateDatabaseRequest(proto.Message): - r"""The request for - [FirestoreAdmin.UpdateDatabase][google.firestore.admin.v1.FirestoreAdmin.UpdateDatabase]. - - Attributes: - database (google.cloud.firestore_admin_v1.types.Database): - Required. The database to update. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to be updated. - """ - - database: gfa_database.Database = proto.Field( - proto.MESSAGE, - number=1, - message=gfa_database.Database, - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class UpdateDatabaseMetadata(proto.Message): - r"""Metadata related to the update database operation.""" - - -class DeleteDatabaseRequest(proto.Message): - r"""The request for - [FirestoreAdmin.DeleteDatabase][google.firestore.admin.v1.FirestoreAdmin.DeleteDatabase]. - - Attributes: - name (str): - Required. 
A name of the form - ``projects/{project_id}/databases/{database_id}`` - etag (str): - The current etag of the Database. If an etag is provided and - does not match the current etag of the database, deletion - will be blocked and a FAILED_PRECONDITION error will be - returned. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - etag: str = proto.Field( - proto.STRING, - number=3, - ) - - -class DeleteDatabaseMetadata(proto.Message): - r"""Metadata related to the delete database operation.""" - - -class CreateUserCredsRequest(proto.Message): - r"""The request for - [FirestoreAdmin.CreateUserCreds][google.firestore.admin.v1.FirestoreAdmin.CreateUserCreds]. - - Attributes: - parent (str): - Required. A parent name of the form - ``projects/{project_id}/databases/{database_id}`` - user_creds (google.cloud.firestore_admin_v1.types.UserCreds): - Required. The user creds to create. - user_creds_id (str): - Required. The ID to use for the user creds, which will - become the final component of the user creds's resource - name. - - This value should be 4-63 characters. Valid characters are - /[a-z][0-9]-/ with first character a letter and the last a - letter or a number. Must not be UUID-like - /[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}/. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - user_creds: gfa_user_creds.UserCreds = proto.Field( - proto.MESSAGE, - number=2, - message=gfa_user_creds.UserCreds, - ) - user_creds_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class GetUserCredsRequest(proto.Message): - r"""The request for - [FirestoreAdmin.GetUserCreds][google.firestore.admin.v1.FirestoreAdmin.GetUserCreds]. - - Attributes: - name (str): - Required. 
A name of the form - ``projects/{project_id}/databases/{database_id}/userCreds/{user_creds_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListUserCredsRequest(proto.Message): - r"""The request for - [FirestoreAdmin.ListUserCreds][google.firestore.admin.v1.FirestoreAdmin.ListUserCreds]. - - Attributes: - parent (str): - Required. A parent database name of the form - ``projects/{project_id}/databases/{database_id}`` - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListUserCredsResponse(proto.Message): - r"""The response for - [FirestoreAdmin.ListUserCreds][google.firestore.admin.v1.FirestoreAdmin.ListUserCreds]. - - Attributes: - user_creds (MutableSequence[google.cloud.firestore_admin_v1.types.UserCreds]): - The user creds for the database. - """ - - user_creds: MutableSequence[gfa_user_creds.UserCreds] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gfa_user_creds.UserCreds, - ) - - -class EnableUserCredsRequest(proto.Message): - r"""The request for - [FirestoreAdmin.EnableUserCreds][google.firestore.admin.v1.FirestoreAdmin.EnableUserCreds]. - - Attributes: - name (str): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}/userCreds/{user_creds_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DisableUserCredsRequest(proto.Message): - r"""The request for - [FirestoreAdmin.DisableUserCreds][google.firestore.admin.v1.FirestoreAdmin.DisableUserCreds]. - - Attributes: - name (str): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}/userCreds/{user_creds_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ResetUserPasswordRequest(proto.Message): - r"""The request for - [FirestoreAdmin.ResetUserPassword][google.firestore.admin.v1.FirestoreAdmin.ResetUserPassword]. - - Attributes: - name (str): - Required. 
A name of the form - ``projects/{project_id}/databases/{database_id}/userCreds/{user_creds_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteUserCredsRequest(proto.Message): - r"""The request for - [FirestoreAdmin.DeleteUserCreds][google.firestore.admin.v1.FirestoreAdmin.DeleteUserCreds]. - - Attributes: - name (str): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}/userCreds/{user_creds_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateBackupScheduleRequest(proto.Message): - r"""The request for - [FirestoreAdmin.CreateBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.CreateBackupSchedule]. - - Attributes: - parent (str): - Required. The parent database. - - Format ``projects/{project}/databases/{database}`` - backup_schedule (google.cloud.firestore_admin_v1.types.BackupSchedule): - Required. The backup schedule to create. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - backup_schedule: schedule.BackupSchedule = proto.Field( - proto.MESSAGE, - number=2, - message=schedule.BackupSchedule, - ) - - -class GetBackupScheduleRequest(proto.Message): - r"""The request for - [FirestoreAdmin.GetBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.GetBackupSchedule]. - - Attributes: - name (str): - Required. The name of the backup schedule. - - Format - ``projects/{project}/databases/{database}/backupSchedules/{backup_schedule}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateBackupScheduleRequest(proto.Message): - r"""The request for - [FirestoreAdmin.UpdateBackupSchedule][google.firestore.admin.v1.FirestoreAdmin.UpdateBackupSchedule]. - - Attributes: - backup_schedule (google.cloud.firestore_admin_v1.types.BackupSchedule): - Required. The backup schedule to update. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to be updated. 
- """ - - backup_schedule: schedule.BackupSchedule = proto.Field( - proto.MESSAGE, - number=1, - message=schedule.BackupSchedule, - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class ListBackupSchedulesRequest(proto.Message): - r"""The request for - [FirestoreAdmin.ListBackupSchedules][google.firestore.admin.v1.FirestoreAdmin.ListBackupSchedules]. - - Attributes: - parent (str): - Required. The parent database. - - Format is ``projects/{project}/databases/{database}``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListBackupSchedulesResponse(proto.Message): - r"""The response for - [FirestoreAdmin.ListBackupSchedules][google.firestore.admin.v1.FirestoreAdmin.ListBackupSchedules]. - - Attributes: - backup_schedules (MutableSequence[google.cloud.firestore_admin_v1.types.BackupSchedule]): - List of all backup schedules. - """ - - backup_schedules: MutableSequence[schedule.BackupSchedule] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=schedule.BackupSchedule, - ) - - -class DeleteBackupScheduleRequest(proto.Message): - r"""The request for [FirestoreAdmin.DeleteBackupSchedules][]. - - Attributes: - name (str): - Required. The name of the backup schedule. - - Format - ``projects/{project}/databases/{database}/backupSchedules/{backup_schedule}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateIndexRequest(proto.Message): - r"""The request for - [FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex]. - - Attributes: - parent (str): - Required. A parent name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`` - index (google.cloud.firestore_admin_v1.types.Index): - Required. The composite index to create. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - index: gfa_index.Index = proto.Field( - proto.MESSAGE, - number=2, - message=gfa_index.Index, - ) - - -class ListIndexesRequest(proto.Message): - r"""The request for - [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes]. - - Attributes: - parent (str): - Required. A parent name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`` - filter (str): - The filter to apply to list results. - page_size (int): - The number of results to return. - page_token (str): - A page token, returned from a previous call to - [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes], - that may be used to get the next page of results. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListIndexesResponse(proto.Message): - r"""The response for - [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes]. - - Attributes: - indexes (MutableSequence[google.cloud.firestore_admin_v1.types.Index]): - The requested indexes. - next_page_token (str): - A page token that may be used to request - another page of results. If blank, this is the - last page. - """ - - @property - def raw_page(self): - return self - - indexes: MutableSequence[gfa_index.Index] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gfa_index.Index, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetIndexRequest(proto.Message): - r"""The request for - [FirestoreAdmin.GetIndex][google.firestore.admin.v1.FirestoreAdmin.GetIndex]. - - Attributes: - name (str): - Required. 
A name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteIndexRequest(proto.Message): - r"""The request for - [FirestoreAdmin.DeleteIndex][google.firestore.admin.v1.FirestoreAdmin.DeleteIndex]. - - Attributes: - name (str): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateFieldRequest(proto.Message): - r"""The request for - [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField]. - - Attributes: - field (google.cloud.firestore_admin_v1.types.Field): - Required. The field to be updated. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - A mask, relative to the field. If specified, only - configuration specified by this field_mask will be updated - in the field. - """ - - field: gfa_field.Field = proto.Field( - proto.MESSAGE, - number=1, - message=gfa_field.Field, - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class GetFieldRequest(proto.Message): - r"""The request for - [FirestoreAdmin.GetField][google.firestore.admin.v1.FirestoreAdmin.GetField]. - - Attributes: - name (str): - Required. A name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListFieldsRequest(proto.Message): - r"""The request for - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]. - - Attributes: - parent (str): - Required. A parent name of the form - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}`` - filter (str): - The filter to apply to list results. 
Currently, - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] - only supports listing fields that have been explicitly - overridden. To issue this query, call - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] - with a filter that includes - ``indexConfig.usesAncestorConfig:false`` or ``ttlConfig:*``. - page_size (int): - The number of results to return. - page_token (str): - A page token, returned from a previous call to - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields], - that may be used to get the next page of results. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListFieldsResponse(proto.Message): - r"""The response for - [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]. - - Attributes: - fields (MutableSequence[google.cloud.firestore_admin_v1.types.Field]): - The requested fields. - next_page_token (str): - A page token that may be used to request - another page of results. If blank, this is the - last page. - """ - - @property - def raw_page(self): - return self - - fields: MutableSequence[gfa_field.Field] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gfa_field.Field, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ExportDocumentsRequest(proto.Message): - r"""The request for - [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments]. - - Attributes: - name (str): - Required. Database to export. Should be of the form: - ``projects/{project_id}/databases/{database_id}``. - collection_ids (MutableSequence[str]): - Which collection IDs to export. Unspecified - means all collections. Each collection ID in - this list must be unique. 
- output_uri_prefix (str): - The output URI. Currently only supports Google Cloud Storage - URIs of the form: ``gs://BUCKET_NAME[/NAMESPACE_PATH]``, - where ``BUCKET_NAME`` is the name of the Google Cloud - Storage bucket and ``NAMESPACE_PATH`` is an optional Google - Cloud Storage namespace path. When choosing a name, be sure - to consider Google Cloud Storage naming guidelines: - https://cloud.google.com/storage/docs/naming. If the URI is - a bucket (without a namespace path), a prefix will be - generated based on the start time. - namespace_ids (MutableSequence[str]): - An empty list represents all namespaces. This - is the preferred usage for databases that don't - use namespaces. - - An empty string element represents the default - namespace. This should be used if the database - has data in non-default namespaces, but doesn't - want to include them. Each namespace in this - list must be unique. - snapshot_time (google.protobuf.timestamp_pb2.Timestamp): - The timestamp that corresponds to the version of the - database to be exported. The timestamp must be in the past, - rounded to the minute and not older than - [earliestVersionTime][google.firestore.admin.v1.Database.earliest_version_time]. - If specified, then the exported documents will represent a - consistent view of the database at the provided time. - Otherwise, there are no guarantees about the consistency of - the exported documents. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - collection_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - output_uri_prefix: str = proto.Field( - proto.STRING, - number=3, - ) - namespace_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - snapshot_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - - -class ImportDocumentsRequest(proto.Message): - r"""The request for - [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments]. - - Attributes: - name (str): - Required. Database to import into. Should be of the form: - ``projects/{project_id}/databases/{database_id}``. - collection_ids (MutableSequence[str]): - Which collection IDs to import. Unspecified - means all collections included in the import. - Each collection ID in this list must be unique. - input_uri_prefix (str): - Location of the exported files. This must match the - output_uri_prefix of an ExportDocumentsResponse from an - export that has completed successfully. See: - [google.firestore.admin.v1.ExportDocumentsResponse.output_uri_prefix][google.firestore.admin.v1.ExportDocumentsResponse.output_uri_prefix]. - namespace_ids (MutableSequence[str]): - An empty list represents all namespaces. This - is the preferred usage for databases that don't - use namespaces. - - An empty string element represents the default - namespace. This should be used if the database - has data in non-default namespaces, but doesn't - want to include them. Each namespace in this - list must be unique. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - collection_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - input_uri_prefix: str = proto.Field( - proto.STRING, - number=3, - ) - namespace_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - - -class BulkDeleteDocumentsRequest(proto.Message): - r"""The request for - [FirestoreAdmin.BulkDeleteDocuments][google.firestore.admin.v1.FirestoreAdmin.BulkDeleteDocuments]. - - When both collection_ids and namespace_ids are set, only documents - satisfying both conditions will be deleted. - - Requests with namespace_ids and collection_ids both empty will be - rejected. Please use - [FirestoreAdmin.DeleteDatabase][google.firestore.admin.v1.FirestoreAdmin.DeleteDatabase] - instead. - - Attributes: - name (str): - Required. Database to operate. Should be of the form: - ``projects/{project_id}/databases/{database_id}``. - collection_ids (MutableSequence[str]): - Optional. IDs of the collection groups to - delete. Unspecified means all collection groups. - - Each collection group in this list must be - unique. - namespace_ids (MutableSequence[str]): - Optional. Namespaces to delete. - - An empty list means all namespaces. This is the - recommended usage for databases that don't use - namespaces. - - An empty string element represents the default - namespace. This should be used if the database - has data in non-default namespaces, but doesn't - want to delete from them. - - Each namespace in this list must be unique. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - collection_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - namespace_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class BulkDeleteDocumentsResponse(proto.Message): - r"""The response for - [FirestoreAdmin.BulkDeleteDocuments][google.firestore.admin.v1.FirestoreAdmin.BulkDeleteDocuments]. 
- - """ - - -class GetBackupRequest(proto.Message): - r"""The request for - [FirestoreAdmin.GetBackup][google.firestore.admin.v1.FirestoreAdmin.GetBackup]. - - Attributes: - name (str): - Required. Name of the backup to fetch. - - Format is - ``projects/{project}/locations/{location}/backups/{backup}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListBackupsRequest(proto.Message): - r"""The request for - [FirestoreAdmin.ListBackups][google.firestore.admin.v1.FirestoreAdmin.ListBackups]. - - Attributes: - parent (str): - Required. The location to list backups from. - - Format is ``projects/{project}/locations/{location}``. Use - ``{location} = '-'`` to list backups from all locations for - the given project. This allows listing backups from a single - location or from all locations. - filter (str): - An expression that filters the list of returned backups. - - A filter expression consists of a field name, a comparison - operator, and a value for filtering. The value must be a - string, a number, or a boolean. The comparison operator must - be one of: ``<``, ``>``, ``<=``, ``>=``, ``!=``, ``=``, or - ``:``. Colon ``:`` is the contains operator. Filter rules - are not case sensitive. - - The following fields in the - [Backup][google.firestore.admin.v1.Backup] are eligible for - filtering: - - - ``database_uid`` (supports ``=`` only) - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListBackupsResponse(proto.Message): - r"""The response for - [FirestoreAdmin.ListBackups][google.firestore.admin.v1.FirestoreAdmin.ListBackups]. - - Attributes: - backups (MutableSequence[google.cloud.firestore_admin_v1.types.Backup]): - List of all backups for the project. - unreachable (MutableSequence[str]): - List of locations that existing backups were - not able to be fetched from. 
- Instead of failing the entire requests when a - single location is unreachable, this response - returns a partial result set and list of - locations unable to be reached here. The request - can be retried against a single location to get - a concrete error. - """ - - backups: MutableSequence[gfa_backup.Backup] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gfa_backup.Backup, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class DeleteBackupRequest(proto.Message): - r"""The request for - [FirestoreAdmin.DeleteBackup][google.firestore.admin.v1.FirestoreAdmin.DeleteBackup]. - - Attributes: - name (str): - Required. Name of the backup to delete. - - format is - ``projects/{project}/locations/{location}/backups/{backup}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class RestoreDatabaseRequest(proto.Message): - r"""The request message for - [FirestoreAdmin.RestoreDatabase][google.firestore.admin.v1.FirestoreAdmin.RestoreDatabase]. - - Attributes: - parent (str): - Required. The project to restore the database in. Format is - ``projects/{project_id}``. - database_id (str): - Required. The ID to use for the database, which will become - the final component of the database's resource name. This - database ID must not be associated with an existing - database. - - This value should be 4-63 characters. Valid characters are - /[a-z][0-9]-/ with first character a letter and the last a - letter or a number. Must not be UUID-like - /[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}/. - - "(default)" database ID is also valid. - backup (str): - Required. Backup to restore from. Must be from the same - project as the parent. - - The restored database will be created in the same location - as the source backup. - - Format is: - ``projects/{project_id}/locations/{location}/backups/{backup}`` - encryption_config (google.cloud.firestore_admin_v1.types.Database.EncryptionConfig): - Optional. 
Encryption configuration for the restored - database. - - If this field is not specified, the restored database will - use the same encryption configuration as the backup, namely - [use_source_encryption][google.firestore.admin.v1.Database.EncryptionConfig.use_source_encryption]. - tags (MutableMapping[str, str]): - Optional. Immutable. Tags to be bound to the restored - database. - - The tags should be provided in the format of - ``tagKeys/{tag_key_id} -> tagValues/{tag_value_id}``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - database_id: str = proto.Field( - proto.STRING, - number=2, - ) - backup: str = proto.Field( - proto.STRING, - number=3, - ) - encryption_config: gfa_database.Database.EncryptionConfig = proto.Field( - proto.MESSAGE, - number=9, - message=gfa_database.Database.EncryptionConfig, - ) - tags: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=10, - ) - - -class CloneDatabaseRequest(proto.Message): - r"""The request message for - [FirestoreAdmin.CloneDatabase][google.firestore.admin.v1.FirestoreAdmin.CloneDatabase]. - - Attributes: - parent (str): - Required. The project to clone the database in. Format is - ``projects/{project_id}``. - database_id (str): - Required. The ID to use for the database, which will become - the final component of the database's resource name. This - database ID must not be associated with an existing - database. - - This value should be 4-63 characters. Valid characters are - /[a-z][0-9]-/ with first character a letter and the last a - letter or a number. Must not be UUID-like - /[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}/. - - "(default)" database ID is also valid. - pitr_snapshot (google.cloud.firestore_admin_v1.types.PitrSnapshot): - Required. Specification of the PITR data to - clone from. The source database must exist. - - The cloned database will be created in the same - location as the source database. 
- encryption_config (google.cloud.firestore_admin_v1.types.Database.EncryptionConfig): - Optional. Encryption configuration for the cloned database. - - If this field is not specified, the cloned database will use - the same encryption configuration as the source database, - namely - [use_source_encryption][google.firestore.admin.v1.Database.EncryptionConfig.use_source_encryption]. - tags (MutableMapping[str, str]): - Optional. Immutable. Tags to be bound to the cloned - database. - - The tags should be provided in the format of - ``tagKeys/{tag_key_id} -> tagValues/{tag_value_id}``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - database_id: str = proto.Field( - proto.STRING, - number=2, - ) - pitr_snapshot: snapshot.PitrSnapshot = proto.Field( - proto.MESSAGE, - number=6, - message=snapshot.PitrSnapshot, - ) - encryption_config: gfa_database.Database.EncryptionConfig = proto.Field( - proto.MESSAGE, - number=4, - message=gfa_database.Database.EncryptionConfig, - ) - tags: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=5, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_admin_v1/types/index.py b/google/cloud/firestore_admin_v1/types/index.py deleted file mode 100644 index 02f9f26470..0000000000 --- a/google/cloud/firestore_admin_v1/types/index.py +++ /dev/null @@ -1,366 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package="google.firestore.admin.v1", - manifest={ - "Index", - }, -) - - -class Index(proto.Message): - r"""Cloud Firestore indexes enable simple and complex queries - against documents in a database. - - Attributes: - name (str): - Output only. A server defined name for this index. The form - of this name for composite indexes will be: - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{composite_index_id}`` - For single field indexes, this field will be empty. - query_scope (google.cloud.firestore_admin_v1.types.Index.QueryScope): - Indexes with a collection query scope - specified allow queries against a collection - that is the child of a specific document, - specified at query time, and that has the same - collection ID. - - Indexes with a collection group query scope - specified allow queries against all collections - descended from a specific document, specified at - query time, and that have the same collection ID - as this index. - api_scope (google.cloud.firestore_admin_v1.types.Index.ApiScope): - The API scope supported by this index. - fields (MutableSequence[google.cloud.firestore_admin_v1.types.Index.IndexField]): - The fields supported by this index. - - For composite indexes, this requires a minimum of 2 and a - maximum of 100 fields. The last field entry is always for - the field path ``__name__``. If, on creation, ``__name__`` - was not specified as the last field, it will be added - automatically with the same direction as that of the last - field defined. If the final field in a composite index is - not directional, the ``__name__`` will be ordered ASCENDING - (unless explicitly specified). 
- - For single field indexes, this will always be exactly one - entry with a field path equal to the field path of the - associated field. - state (google.cloud.firestore_admin_v1.types.Index.State): - Output only. The serving state of the index. - density (google.cloud.firestore_admin_v1.types.Index.Density): - Immutable. The density configuration of the - index. - multikey (bool): - Optional. Whether the index is multikey. By default, the - index is not multikey. For non-multikey indexes, none of the - paths in the index definition reach or traverse an array, - except via an explicit array index. For multikey indexes, at - most one of the paths in the index definition reach or - traverse an array, except via an explicit array index. - Violations will result in errors. - - Note this field only applies to index with - MONGODB_COMPATIBLE_API ApiScope. - shard_count (int): - Optional. The number of shards for the index. - """ - - class QueryScope(proto.Enum): - r"""Query Scope defines the scope at which a query is run. This is - specified on a StructuredQuery's ``from`` field. - - Values: - QUERY_SCOPE_UNSPECIFIED (0): - The query scope is unspecified. Not a valid - option. - COLLECTION (1): - Indexes with a collection query scope - specified allow queries against a collection - that is the child of a specific document, - specified at query time, and that has the - collection ID specified by the index. - COLLECTION_GROUP (2): - Indexes with a collection group query scope - specified allow queries against all collections - that has the collection ID specified by the - index. - COLLECTION_RECURSIVE (3): - Include all the collections's ancestor in the - index. Only available for Datastore Mode - databases. - """ - QUERY_SCOPE_UNSPECIFIED = 0 - COLLECTION = 1 - COLLECTION_GROUP = 2 - COLLECTION_RECURSIVE = 3 - - class ApiScope(proto.Enum): - r"""API Scope defines the APIs (Firestore Native, or Firestore in - Datastore Mode) that are supported for queries. 
- - Values: - ANY_API (0): - The index can only be used by the Firestore - Native query API. This is the default. - DATASTORE_MODE_API (1): - The index can only be used by the Firestore - in Datastore Mode query API. - MONGODB_COMPATIBLE_API (2): - The index can only be used by the MONGODB_COMPATIBLE_API. - """ - ANY_API = 0 - DATASTORE_MODE_API = 1 - MONGODB_COMPATIBLE_API = 2 - - class State(proto.Enum): - r"""The state of an index. During index creation, an index will be in - the ``CREATING`` state. If the index is created successfully, it - will transition to the ``READY`` state. If the index creation - encounters a problem, the index will transition to the - ``NEEDS_REPAIR`` state. - - Values: - STATE_UNSPECIFIED (0): - The state is unspecified. - CREATING (1): - The index is being created. - There is an active long-running operation for - the index. The index is updated when writing a - document. Some index data may exist. - READY (2): - The index is ready to be used. - The index is updated when writing a document. - The index is fully populated from all stored - documents it applies to. - NEEDS_REPAIR (3): - The index was being created, but something - went wrong. There is no active long-running - operation for the index, and the most recently - finished long-running operation failed. The - index is not updated when writing a document. - Some index data may exist. - Use the google.longrunning.Operations API to - determine why the operation that last attempted - to create this index failed, then re-create the - index. - """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - READY = 2 - NEEDS_REPAIR = 3 - - class Density(proto.Enum): - r"""The density configuration for the index. - - Values: - DENSITY_UNSPECIFIED (0): - Unspecified. It will use database default - setting. This value is input only. - SPARSE_ALL (1): - In order for an index entry to be added, the document must - contain all fields specified in the index. 
- - This is the only allowed value for indexes having ApiScope - ``ANY_API`` and ``DATASTORE_MODE_API``. - SPARSE_ANY (2): - In order for an index entry to be added, the - document must contain at least one of the fields - specified in the index. Non-existent fields are - treated as having a NULL value when generating - index entries. - DENSE (3): - An index entry will be added regardless of - whether the document contains any of the fields - specified in the index. Non-existent fields are - treated as having a NULL value when generating - index entries. - """ - DENSITY_UNSPECIFIED = 0 - SPARSE_ALL = 1 - SPARSE_ANY = 2 - DENSE = 3 - - class IndexField(proto.Message): - r"""A field in an index. The field_path describes which field is - indexed, the value_mode describes how the field value is indexed. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - field_path (str): - Can be **name**. For single field indexes, this must match - the name of the field or may be omitted. - order (google.cloud.firestore_admin_v1.types.Index.IndexField.Order): - Indicates that this field supports ordering - by the specified order or comparing using =, !=, - <, <=, >, >=. - - This field is a member of `oneof`_ ``value_mode``. - array_config (google.cloud.firestore_admin_v1.types.Index.IndexField.ArrayConfig): - Indicates that this field supports operations on - ``array_value``\ s. - - This field is a member of `oneof`_ ``value_mode``. - vector_config (google.cloud.firestore_admin_v1.types.Index.IndexField.VectorConfig): - Indicates that this field supports nearest - neighbor and distance operations on vector. - - This field is a member of `oneof`_ ``value_mode``. 
- """ - - class Order(proto.Enum): - r"""The supported orderings. - - Values: - ORDER_UNSPECIFIED (0): - The ordering is unspecified. Not a valid - option. - ASCENDING (1): - The field is ordered by ascending field - value. - DESCENDING (2): - The field is ordered by descending field - value. - """ - ORDER_UNSPECIFIED = 0 - ASCENDING = 1 - DESCENDING = 2 - - class ArrayConfig(proto.Enum): - r"""The supported array value configurations. - - Values: - ARRAY_CONFIG_UNSPECIFIED (0): - The index does not support additional array - queries. - CONTAINS (1): - The index supports array containment queries. - """ - ARRAY_CONFIG_UNSPECIFIED = 0 - CONTAINS = 1 - - class VectorConfig(proto.Message): - r"""The index configuration to support vector search operations - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - dimension (int): - Required. The vector dimension this - configuration applies to. - The resulting index will only include vectors of - this dimension, and can be used for vector - search with the same dimension. - flat (google.cloud.firestore_admin_v1.types.Index.IndexField.VectorConfig.FlatIndex): - Indicates the vector index is a flat index. - - This field is a member of `oneof`_ ``type``. - """ - - class FlatIndex(proto.Message): - r"""An index that stores vectors in a flat data structure, and - supports exhaustive search. 
- - """ - - dimension: int = proto.Field( - proto.INT32, - number=1, - ) - flat: "Index.IndexField.VectorConfig.FlatIndex" = proto.Field( - proto.MESSAGE, - number=2, - oneof="type", - message="Index.IndexField.VectorConfig.FlatIndex", - ) - - field_path: str = proto.Field( - proto.STRING, - number=1, - ) - order: "Index.IndexField.Order" = proto.Field( - proto.ENUM, - number=2, - oneof="value_mode", - enum="Index.IndexField.Order", - ) - array_config: "Index.IndexField.ArrayConfig" = proto.Field( - proto.ENUM, - number=3, - oneof="value_mode", - enum="Index.IndexField.ArrayConfig", - ) - vector_config: "Index.IndexField.VectorConfig" = proto.Field( - proto.MESSAGE, - number=4, - oneof="value_mode", - message="Index.IndexField.VectorConfig", - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - query_scope: QueryScope = proto.Field( - proto.ENUM, - number=2, - enum=QueryScope, - ) - api_scope: ApiScope = proto.Field( - proto.ENUM, - number=5, - enum=ApiScope, - ) - fields: MutableSequence[IndexField] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=IndexField, - ) - state: State = proto.Field( - proto.ENUM, - number=4, - enum=State, - ) - density: Density = proto.Field( - proto.ENUM, - number=6, - enum=Density, - ) - multikey: bool = proto.Field( - proto.BOOL, - number=7, - ) - shard_count: int = proto.Field( - proto.INT32, - number=8, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_admin_v1/types/location.py b/google/cloud/firestore_admin_v1/types/location.py deleted file mode 100644 index 94ec176395..0000000000 --- a/google/cloud/firestore_admin_v1/types/location.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package="google.firestore.admin.v1", - manifest={ - "LocationMetadata", - }, -) - - -class LocationMetadata(proto.Message): - r"""The metadata message for - [google.cloud.location.Location.metadata][google.cloud.location.Location.metadata]. - - """ - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_admin_v1/types/operation.py b/google/cloud/firestore_admin_v1/types/operation.py deleted file mode 100644 index c504556933..0000000000 --- a/google/cloud/firestore_admin_v1/types/operation.py +++ /dev/null @@ -1,639 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.firestore_admin_v1.types import index as gfa_index -from google.cloud.firestore_admin_v1.types import snapshot -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package="google.firestore.admin.v1", - manifest={ - "OperationState", - "IndexOperationMetadata", - "FieldOperationMetadata", - "ExportDocumentsMetadata", - "ImportDocumentsMetadata", - "BulkDeleteDocumentsMetadata", - "ExportDocumentsResponse", - "RestoreDatabaseMetadata", - "CloneDatabaseMetadata", - "Progress", - }, -) - - -class OperationState(proto.Enum): - r"""Describes the state of the operation. - - Values: - OPERATION_STATE_UNSPECIFIED (0): - Unspecified. - INITIALIZING (1): - Request is being prepared for processing. - PROCESSING (2): - Request is actively being processed. - CANCELLING (3): - Request is in the process of being cancelled - after user called - google.longrunning.Operations.CancelOperation on - the operation. - FINALIZING (4): - Request has been processed and is in its - finalization stage. - SUCCESSFUL (5): - Request has completed successfully. - FAILED (6): - Request has finished being processed, but - encountered an error. - CANCELLED (7): - Request has finished being cancelled after - user called - google.longrunning.Operations.CancelOperation. - """ - OPERATION_STATE_UNSPECIFIED = 0 - INITIALIZING = 1 - PROCESSING = 2 - CANCELLING = 3 - FINALIZING = 4 - SUCCESSFUL = 5 - FAILED = 6 - CANCELLED = 7 - - -class IndexOperationMetadata(proto.Message): - r"""Metadata for - [google.longrunning.Operation][google.longrunning.Operation] results - from - [FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex]. - - Attributes: - start_time (google.protobuf.timestamp_pb2.Timestamp): - The time this operation started. 
- end_time (google.protobuf.timestamp_pb2.Timestamp): - The time this operation completed. Will be - unset if operation still in progress. - index (str): - The index resource that this operation is acting on. For - example: - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`` - state (google.cloud.firestore_admin_v1.types.OperationState): - The state of the operation. - progress_documents (google.cloud.firestore_admin_v1.types.Progress): - The progress, in documents, of this - operation. - progress_bytes (google.cloud.firestore_admin_v1.types.Progress): - The progress, in bytes, of this operation. - """ - - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - index: str = proto.Field( - proto.STRING, - number=3, - ) - state: "OperationState" = proto.Field( - proto.ENUM, - number=4, - enum="OperationState", - ) - progress_documents: "Progress" = proto.Field( - proto.MESSAGE, - number=5, - message="Progress", - ) - progress_bytes: "Progress" = proto.Field( - proto.MESSAGE, - number=6, - message="Progress", - ) - - -class FieldOperationMetadata(proto.Message): - r"""Metadata for - [google.longrunning.Operation][google.longrunning.Operation] results - from - [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField]. - - Attributes: - start_time (google.protobuf.timestamp_pb2.Timestamp): - The time this operation started. - end_time (google.protobuf.timestamp_pb2.Timestamp): - The time this operation completed. Will be - unset if operation still in progress. - field (str): - The field resource that this operation is acting on. 
For - example: - ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_path}`` - index_config_deltas (MutableSequence[google.cloud.firestore_admin_v1.types.FieldOperationMetadata.IndexConfigDelta]): - A list of - [IndexConfigDelta][google.firestore.admin.v1.FieldOperationMetadata.IndexConfigDelta], - which describe the intent of this operation. - state (google.cloud.firestore_admin_v1.types.OperationState): - The state of the operation. - progress_documents (google.cloud.firestore_admin_v1.types.Progress): - The progress, in documents, of this - operation. - progress_bytes (google.cloud.firestore_admin_v1.types.Progress): - The progress, in bytes, of this operation. - ttl_config_delta (google.cloud.firestore_admin_v1.types.FieldOperationMetadata.TtlConfigDelta): - Describes the deltas of TTL configuration. - """ - - class IndexConfigDelta(proto.Message): - r"""Information about an index configuration change. - - Attributes: - change_type (google.cloud.firestore_admin_v1.types.FieldOperationMetadata.IndexConfigDelta.ChangeType): - Specifies how the index is changing. - index (google.cloud.firestore_admin_v1.types.Index): - The index being changed. - """ - - class ChangeType(proto.Enum): - r"""Specifies how the index is changing. - - Values: - CHANGE_TYPE_UNSPECIFIED (0): - The type of change is not specified or known. - ADD (1): - The single field index is being added. - REMOVE (2): - The single field index is being removed. - """ - CHANGE_TYPE_UNSPECIFIED = 0 - ADD = 1 - REMOVE = 2 - - change_type: "FieldOperationMetadata.IndexConfigDelta.ChangeType" = proto.Field( - proto.ENUM, - number=1, - enum="FieldOperationMetadata.IndexConfigDelta.ChangeType", - ) - index: gfa_index.Index = proto.Field( - proto.MESSAGE, - number=2, - message=gfa_index.Index, - ) - - class TtlConfigDelta(proto.Message): - r"""Information about a TTL configuration change. 
- - Attributes: - change_type (google.cloud.firestore_admin_v1.types.FieldOperationMetadata.TtlConfigDelta.ChangeType): - Specifies how the TTL configuration is - changing. - """ - - class ChangeType(proto.Enum): - r"""Specifies how the TTL config is changing. - - Values: - CHANGE_TYPE_UNSPECIFIED (0): - The type of change is not specified or known. - ADD (1): - The TTL config is being added. - REMOVE (2): - The TTL config is being removed. - """ - CHANGE_TYPE_UNSPECIFIED = 0 - ADD = 1 - REMOVE = 2 - - change_type: "FieldOperationMetadata.TtlConfigDelta.ChangeType" = proto.Field( - proto.ENUM, - number=1, - enum="FieldOperationMetadata.TtlConfigDelta.ChangeType", - ) - - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - field: str = proto.Field( - proto.STRING, - number=3, - ) - index_config_deltas: MutableSequence[IndexConfigDelta] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=IndexConfigDelta, - ) - state: "OperationState" = proto.Field( - proto.ENUM, - number=5, - enum="OperationState", - ) - progress_documents: "Progress" = proto.Field( - proto.MESSAGE, - number=6, - message="Progress", - ) - progress_bytes: "Progress" = proto.Field( - proto.MESSAGE, - number=7, - message="Progress", - ) - ttl_config_delta: TtlConfigDelta = proto.Field( - proto.MESSAGE, - number=8, - message=TtlConfigDelta, - ) - - -class ExportDocumentsMetadata(proto.Message): - r"""Metadata for - [google.longrunning.Operation][google.longrunning.Operation] results - from - [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments]. - - Attributes: - start_time (google.protobuf.timestamp_pb2.Timestamp): - The time this operation started. - end_time (google.protobuf.timestamp_pb2.Timestamp): - The time this operation completed. 
Will be - unset if operation still in progress. - operation_state (google.cloud.firestore_admin_v1.types.OperationState): - The state of the export operation. - progress_documents (google.cloud.firestore_admin_v1.types.Progress): - The progress, in documents, of this - operation. - progress_bytes (google.cloud.firestore_admin_v1.types.Progress): - The progress, in bytes, of this operation. - collection_ids (MutableSequence[str]): - Which collection IDs are being exported. - output_uri_prefix (str): - Where the documents are being exported to. - namespace_ids (MutableSequence[str]): - Which namespace IDs are being exported. - snapshot_time (google.protobuf.timestamp_pb2.Timestamp): - The timestamp that corresponds to the version - of the database that is being exported. If - unspecified, there are no guarantees about the - consistency of the documents being exported. - """ - - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - operation_state: "OperationState" = proto.Field( - proto.ENUM, - number=3, - enum="OperationState", - ) - progress_documents: "Progress" = proto.Field( - proto.MESSAGE, - number=4, - message="Progress", - ) - progress_bytes: "Progress" = proto.Field( - proto.MESSAGE, - number=5, - message="Progress", - ) - collection_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=6, - ) - output_uri_prefix: str = proto.Field( - proto.STRING, - number=7, - ) - namespace_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=8, - ) - snapshot_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=9, - message=timestamp_pb2.Timestamp, - ) - - -class ImportDocumentsMetadata(proto.Message): - r"""Metadata for - [google.longrunning.Operation][google.longrunning.Operation] results - from - 
[FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments]. - - Attributes: - start_time (google.protobuf.timestamp_pb2.Timestamp): - The time this operation started. - end_time (google.protobuf.timestamp_pb2.Timestamp): - The time this operation completed. Will be - unset if operation still in progress. - operation_state (google.cloud.firestore_admin_v1.types.OperationState): - The state of the import operation. - progress_documents (google.cloud.firestore_admin_v1.types.Progress): - The progress, in documents, of this - operation. - progress_bytes (google.cloud.firestore_admin_v1.types.Progress): - The progress, in bytes, of this operation. - collection_ids (MutableSequence[str]): - Which collection IDs are being imported. - input_uri_prefix (str): - The location of the documents being imported. - namespace_ids (MutableSequence[str]): - Which namespace IDs are being imported. - """ - - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - operation_state: "OperationState" = proto.Field( - proto.ENUM, - number=3, - enum="OperationState", - ) - progress_documents: "Progress" = proto.Field( - proto.MESSAGE, - number=4, - message="Progress", - ) - progress_bytes: "Progress" = proto.Field( - proto.MESSAGE, - number=5, - message="Progress", - ) - collection_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=6, - ) - input_uri_prefix: str = proto.Field( - proto.STRING, - number=7, - ) - namespace_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=8, - ) - - -class BulkDeleteDocumentsMetadata(proto.Message): - r"""Metadata for - [google.longrunning.Operation][google.longrunning.Operation] results - from - [FirestoreAdmin.BulkDeleteDocuments][google.firestore.admin.v1.FirestoreAdmin.BulkDeleteDocuments]. 
- - Attributes: - start_time (google.protobuf.timestamp_pb2.Timestamp): - The time this operation started. - end_time (google.protobuf.timestamp_pb2.Timestamp): - The time this operation completed. Will be - unset if operation still in progress. - operation_state (google.cloud.firestore_admin_v1.types.OperationState): - The state of the operation. - progress_documents (google.cloud.firestore_admin_v1.types.Progress): - The progress, in documents, of this - operation. - progress_bytes (google.cloud.firestore_admin_v1.types.Progress): - The progress, in bytes, of this operation. - collection_ids (MutableSequence[str]): - The IDs of the collection groups that are - being deleted. - namespace_ids (MutableSequence[str]): - Which namespace IDs are being deleted. - snapshot_time (google.protobuf.timestamp_pb2.Timestamp): - The timestamp that corresponds to the version - of the database that is being read to get the - list of documents to delete. This time can also - be used as the timestamp of PITR in case of - disaster recovery (subject to PITR window - limit). 
- """ - - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - operation_state: "OperationState" = proto.Field( - proto.ENUM, - number=3, - enum="OperationState", - ) - progress_documents: "Progress" = proto.Field( - proto.MESSAGE, - number=4, - message="Progress", - ) - progress_bytes: "Progress" = proto.Field( - proto.MESSAGE, - number=5, - message="Progress", - ) - collection_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=6, - ) - namespace_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=7, - ) - snapshot_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - - -class ExportDocumentsResponse(proto.Message): - r"""Returned in the - [google.longrunning.Operation][google.longrunning.Operation] - response field. - - Attributes: - output_uri_prefix (str): - Location of the output files. This can be - used to begin an import into Cloud Firestore - (this project or another project) after the - operation completes successfully. - """ - - output_uri_prefix: str = proto.Field( - proto.STRING, - number=1, - ) - - -class RestoreDatabaseMetadata(proto.Message): - r"""Metadata for the [long-running - operation][google.longrunning.Operation] from the - [RestoreDatabase][google.firestore.admin.v1.RestoreDatabase] - request. - - Attributes: - start_time (google.protobuf.timestamp_pb2.Timestamp): - The time the restore was started. - end_time (google.protobuf.timestamp_pb2.Timestamp): - The time the restore finished, unset for - ongoing restores. - operation_state (google.cloud.firestore_admin_v1.types.OperationState): - The operation state of the restore. - database (str): - The name of the database being restored to. - backup (str): - The name of the backup restoring from. 
- progress_percentage (google.cloud.firestore_admin_v1.types.Progress): - How far along the restore is as an estimated - percentage of remaining time. - """ - - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - operation_state: "OperationState" = proto.Field( - proto.ENUM, - number=3, - enum="OperationState", - ) - database: str = proto.Field( - proto.STRING, - number=4, - ) - backup: str = proto.Field( - proto.STRING, - number=5, - ) - progress_percentage: "Progress" = proto.Field( - proto.MESSAGE, - number=8, - message="Progress", - ) - - -class CloneDatabaseMetadata(proto.Message): - r"""Metadata for the [long-running - operation][google.longrunning.Operation] from the - [CloneDatabase][google.firestore.admin.v1.CloneDatabase] request. - - Attributes: - start_time (google.protobuf.timestamp_pb2.Timestamp): - The time the clone was started. - end_time (google.protobuf.timestamp_pb2.Timestamp): - The time the clone finished, unset for - ongoing clones. - operation_state (google.cloud.firestore_admin_v1.types.OperationState): - The operation state of the clone. - database (str): - The name of the database being cloned to. - pitr_snapshot (google.cloud.firestore_admin_v1.types.PitrSnapshot): - The snapshot from which this database was - cloned. - progress_percentage (google.cloud.firestore_admin_v1.types.Progress): - How far along the clone is as an estimated - percentage of remaining time. 
- """ - - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - operation_state: "OperationState" = proto.Field( - proto.ENUM, - number=3, - enum="OperationState", - ) - database: str = proto.Field( - proto.STRING, - number=4, - ) - pitr_snapshot: snapshot.PitrSnapshot = proto.Field( - proto.MESSAGE, - number=7, - message=snapshot.PitrSnapshot, - ) - progress_percentage: "Progress" = proto.Field( - proto.MESSAGE, - number=6, - message="Progress", - ) - - -class Progress(proto.Message): - r"""Describes the progress of the operation. Unit of work is generic and - must be interpreted based on where - [Progress][google.firestore.admin.v1.Progress] is used. - - Attributes: - estimated_work (int): - The amount of work estimated. - completed_work (int): - The amount of work completed. - """ - - estimated_work: int = proto.Field( - proto.INT64, - number=1, - ) - completed_work: int = proto.Field( - proto.INT64, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_admin_v1/types/schedule.py b/google/cloud/firestore_admin_v1/types/schedule.py deleted file mode 100644 index a767edfe1f..0000000000 --- a/google/cloud/firestore_admin_v1/types/schedule.py +++ /dev/null @@ -1,148 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.type import dayofweek_pb2 # type: ignore - - -__protobuf__ = proto.module( - package="google.firestore.admin.v1", - manifest={ - "BackupSchedule", - "DailyRecurrence", - "WeeklyRecurrence", - }, -) - - -class BackupSchedule(proto.Message): - r"""A backup schedule for a Cloud Firestore Database. - - This resource is owned by the database it is backing up, and is - deleted along with the database. The actual backups are not - though. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Output only. The unique backup schedule identifier across - all locations and databases for the given project. - - This will be auto-assigned. - - Format is - ``projects/{project}/databases/{database}/backupSchedules/{backup_schedule}`` - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The timestamp at which this - backup schedule was created and effective since. - - No backups will be created for this schedule - before this time. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The timestamp at which this backup schedule was - most recently updated. When a backup schedule is first - created, this is the same as create_time. - retention (google.protobuf.duration_pb2.Duration): - At what relative time in the future, compared - to its creation time, the backup should be - deleted, e.g. 
keep backups for 7 days. - - The maximum supported retention period is 14 - weeks. - daily_recurrence (google.cloud.firestore_admin_v1.types.DailyRecurrence): - For a schedule that runs daily. - - This field is a member of `oneof`_ ``recurrence``. - weekly_recurrence (google.cloud.firestore_admin_v1.types.WeeklyRecurrence): - For a schedule that runs weekly on a specific - day. - - This field is a member of `oneof`_ ``recurrence``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=10, - message=timestamp_pb2.Timestamp, - ) - retention: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=6, - message=duration_pb2.Duration, - ) - daily_recurrence: "DailyRecurrence" = proto.Field( - proto.MESSAGE, - number=7, - oneof="recurrence", - message="DailyRecurrence", - ) - weekly_recurrence: "WeeklyRecurrence" = proto.Field( - proto.MESSAGE, - number=8, - oneof="recurrence", - message="WeeklyRecurrence", - ) - - -class DailyRecurrence(proto.Message): - r"""Represents a recurring schedule that runs every day. - - The time zone is UTC. - - """ - - -class WeeklyRecurrence(proto.Message): - r"""Represents a recurring schedule that runs on a specified day - of the week. - The time zone is UTC. - - Attributes: - day (google.type.dayofweek_pb2.DayOfWeek): - The day of week to run. - - DAY_OF_WEEK_UNSPECIFIED is not allowed. 
- """ - - day: dayofweek_pb2.DayOfWeek = proto.Field( - proto.ENUM, - number=2, - enum=dayofweek_pb2.DayOfWeek, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_admin_v1/types/snapshot.py b/google/cloud/firestore_admin_v1/types/snapshot.py deleted file mode 100644 index e56a125f59..0000000000 --- a/google/cloud/firestore_admin_v1/types/snapshot.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package="google.firestore.admin.v1", - manifest={ - "PitrSnapshot", - }, -) - - -class PitrSnapshot(proto.Message): - r"""A consistent snapshot of a database at a specific point in - time. A PITR (Point-in-time recovery) snapshot with previous - versions of a database's data is available for every minute up - to the associated database's data retention period. If the PITR - feature is enabled, the retention period is 7 days; otherwise, - it is one hour. - - Attributes: - database (str): - Required. The name of the database that this was a snapshot - of. Format: ``projects/{project}/databases/{database}``. - database_uid (bytes): - Output only. Public UUID of the database the - snapshot was associated with. 
- snapshot_time (google.protobuf.timestamp_pb2.Timestamp): - Required. Snapshot time of the database. - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - database_uid: bytes = proto.Field( - proto.BYTES, - number=2, - ) - snapshot_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_admin_v1/types/user_creds.py b/google/cloud/firestore_admin_v1/types/user_creds.py deleted file mode 100644 index 39bd119478..0000000000 --- a/google/cloud/firestore_admin_v1/types/user_creds.py +++ /dev/null @@ -1,124 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package="google.firestore.admin.v1", - manifest={ - "UserCreds", - }, -) - - -class UserCreds(proto.Message): - r"""A Cloud Firestore User Creds. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Identifier. The resource name of the UserCreds. Format: - ``projects/{project}/databases/{database}/userCreds/{user_creds}`` - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. 
The time the user creds were - created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the user creds were - last updated. - state (google.cloud.firestore_admin_v1.types.UserCreds.State): - Output only. Whether the user creds are - enabled or disabled. Defaults to ENABLED on - creation. - secure_password (str): - Output only. The plaintext server-generated - password for the user creds. Only populated in - responses for CreateUserCreds and - ResetUserPassword. - resource_identity (google.cloud.firestore_admin_v1.types.UserCreds.ResourceIdentity): - Resource Identity descriptor. - - This field is a member of `oneof`_ ``UserCredsIdentity``. - """ - - class State(proto.Enum): - r"""The state of the user creds (ENABLED or DISABLED). - - Values: - STATE_UNSPECIFIED (0): - The default value. Should not be used. - ENABLED (1): - The user creds are enabled. - DISABLED (2): - The user creds are disabled. - """ - STATE_UNSPECIFIED = 0 - ENABLED = 1 - DISABLED = 2 - - class ResourceIdentity(proto.Message): - r"""Describes a Resource Identity principal. - - Attributes: - principal (str): - Output only. Principal identifier string. 
- See: - https://cloud.google.com/iam/docs/principal-identifiers - """ - - principal: str = proto.Field( - proto.STRING, - number=1, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - state: State = proto.Field( - proto.ENUM, - number=4, - enum=State, - ) - secure_password: str = proto.Field( - proto.STRING, - number=5, - ) - resource_identity: ResourceIdentity = proto.Field( - proto.MESSAGE, - number=6, - oneof="UserCredsIdentity", - message=ResourceIdentity, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_bundle/__init__.py b/google/cloud/firestore_bundle/__init__.py deleted file mode 100644 index 1b6469437b..0000000000 --- a/google/cloud/firestore_bundle/__init__.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.firestore_bundle import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .types.bundle import BundledDocumentMetadata -from .types.bundle import BundledQuery -from .types.bundle import BundleElement -from .types.bundle import BundleMetadata -from .types.bundle import NamedQuery - -from .bundle import FirestoreBundle - -__all__ = ( - "BundleElement", - "BundleMetadata", - "BundledDocumentMetadata", - "BundledQuery", - "FirestoreBundle", - "NamedQuery", -) diff --git a/google/cloud/firestore_bundle/gapic_metadata.json b/google/cloud/firestore_bundle/gapic_metadata.json deleted file mode 100644 index e81fe51253..0000000000 --- a/google/cloud/firestore_bundle/gapic_metadata.json +++ /dev/null @@ -1,7 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.bundle", - "protoPackage": "google.firestore.bundle", - "schema": "1.0" -} diff --git a/google/cloud/firestore_bundle/gapic_version.py b/google/cloud/firestore_bundle/gapic_version.py deleted file mode 100644 index b5f2eaf6ce..0000000000 --- a/google/cloud/firestore_bundle/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__version__ = "2.21.0" # {x-release-please-version} diff --git a/google/cloud/firestore_bundle/py.typed b/google/cloud/firestore_bundle/py.typed deleted file mode 100644 index e2987f2963..0000000000 --- a/google/cloud/firestore_bundle/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-bundle package uses inline types. diff --git a/google/cloud/firestore_bundle/services/__init__.py b/google/cloud/firestore_bundle/services/__init__.py deleted file mode 100644 index cbf94b283c..0000000000 --- a/google/cloud/firestore_bundle/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/google/cloud/firestore_bundle/types/__init__.py b/google/cloud/firestore_bundle/types/__init__.py deleted file mode 100644 index 2cc8d9fb94..0000000000 --- a/google/cloud/firestore_bundle/types/__init__.py +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from .bundle import ( - BundledDocumentMetadata, - BundledQuery, - BundleElement, - BundleMetadata, - NamedQuery, -) - -__all__ = ( - "BundledDocumentMetadata", - "BundledQuery", - "BundleElement", - "BundleMetadata", - "NamedQuery", -) diff --git a/google/cloud/firestore_bundle/types/bundle.py b/google/cloud/firestore_bundle/types/bundle.py deleted file mode 100644 index 3671833d9b..0000000000 --- a/google/cloud/firestore_bundle/types/bundle.py +++ /dev/null @@ -1,252 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.firestore_v1.types import document as document_pb2 # type: ignore -from google.cloud.firestore_v1.types import query as query_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package="google.firestore.bundle", - manifest={ - "BundledQuery", - "NamedQuery", - "BundledDocumentMetadata", - "BundleMetadata", - "BundleElement", - }, -) - - -class BundledQuery(proto.Message): - r"""Encodes a query saved in the bundle. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - parent (str): - The parent resource name. 
- structured_query (google.firestore.v1.query_pb2.StructuredQuery): - A structured query. - - This field is a member of `oneof`_ ``query_type``. - limit_type (google.cloud.bundle.types.BundledQuery.LimitType): - - """ - - class LimitType(proto.Enum): - r"""If the query is a limit query, should the limit be applied to - the beginning or the end of results. - - Values: - FIRST (0): - No description available. - LAST (1): - No description available. - """ - FIRST = 0 - LAST = 1 - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - structured_query: query_pb2.StructuredQuery = proto.Field( - proto.MESSAGE, - number=2, - oneof="query_type", - message=query_pb2.StructuredQuery, - ) - limit_type: LimitType = proto.Field( - proto.ENUM, - number=3, - enum=LimitType, - ) - - -class NamedQuery(proto.Message): - r"""A Query associated with a name, created as part of the bundle - file, and can be read by client SDKs once the bundle containing - them is loaded. - - Attributes: - name (str): - Name of the query, such that client can use - the name to load this query from bundle, and - resume from when the query results are - materialized into this bundle. - bundled_query (google.cloud.bundle.types.BundledQuery): - The query saved in the bundle. - read_time (google.protobuf.timestamp_pb2.Timestamp): - The read time of the query, when it is used - to build the bundle. This is useful to resume - the query from the bundle, once it is loaded by - client SDKs. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - bundled_query: "BundledQuery" = proto.Field( - proto.MESSAGE, - number=2, - message="BundledQuery", - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -class BundledDocumentMetadata(proto.Message): - r"""Metadata describing a Firestore document saved in the bundle. - - Attributes: - name (str): - The document key of a bundled document. 
- read_time (google.protobuf.timestamp_pb2.Timestamp): - The snapshot version of the document data - bundled. - exists (bool): - Whether the document exists. - queries (MutableSequence[str]): - The names of the queries in this bundle that - this document matches to. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - exists: bool = proto.Field( - proto.BOOL, - number=3, - ) - queries: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - - -class BundleMetadata(proto.Message): - r"""Metadata describing the bundle file/stream. - - Attributes: - id (str): - The ID of the bundle. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Time at which the documents snapshot is taken - for this bundle. - version (int): - The schema version of the bundle. - total_documents (int): - The number of documents in the bundle. - total_bytes (int): - The size of the bundle in bytes, excluding this - ``BundleMetadata``. - """ - - id: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - version: int = proto.Field( - proto.UINT32, - number=3, - ) - total_documents: int = proto.Field( - proto.UINT32, - number=4, - ) - total_bytes: int = proto.Field( - proto.UINT64, - number=5, - ) - - -class BundleElement(proto.Message): - r"""A Firestore bundle is a length-prefixed stream of JSON - representations of ``BundleElement``. Only one ``BundleMetadata`` is - expected, and it should be the first element. The named queries - follow after ``metadata``. Every ``document_metadata`` is - immediately followed by a ``document``. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. 
- Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - metadata (google.cloud.bundle.types.BundleMetadata): - - This field is a member of `oneof`_ ``element_type``. - named_query (google.cloud.bundle.types.NamedQuery): - - This field is a member of `oneof`_ ``element_type``. - document_metadata (google.cloud.bundle.types.BundledDocumentMetadata): - - This field is a member of `oneof`_ ``element_type``. - document (google.firestore.v1.document_pb2.Document): - - This field is a member of `oneof`_ ``element_type``. - """ - - metadata: "BundleMetadata" = proto.Field( - proto.MESSAGE, - number=1, - oneof="element_type", - message="BundleMetadata", - ) - named_query: "NamedQuery" = proto.Field( - proto.MESSAGE, - number=2, - oneof="element_type", - message="NamedQuery", - ) - document_metadata: "BundledDocumentMetadata" = proto.Field( - proto.MESSAGE, - number=3, - oneof="element_type", - message="BundledDocumentMetadata", - ) - document: document_pb2.Document = proto.Field( - proto.MESSAGE, - number=4, - oneof="element_type", - message=document_pb2.Document, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_v1/gapic_metadata.json b/google/cloud/firestore_v1/gapic_metadata.json deleted file mode 100644 index d0462f9640..0000000000 --- a/google/cloud/firestore_v1/gapic_metadata.json +++ /dev/null @@ -1,268 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.firestore_v1", - "protoPackage": "google.firestore.v1", - "schema": "1.0", - "services": { - "Firestore": { - "clients": { - "grpc": { - "libraryClient": "FirestoreClient", - "rpcs": { - "BatchGetDocuments": { - "methods": [ - "batch_get_documents" - ] - }, - "BatchWrite": { - "methods": [ - "batch_write" - ] - }, - 
"BeginTransaction": { - "methods": [ - "begin_transaction" - ] - }, - "Commit": { - "methods": [ - "commit" - ] - }, - "CreateDocument": { - "methods": [ - "create_document" - ] - }, - "DeleteDocument": { - "methods": [ - "delete_document" - ] - }, - "GetDocument": { - "methods": [ - "get_document" - ] - }, - "ListCollectionIds": { - "methods": [ - "list_collection_ids" - ] - }, - "ListDocuments": { - "methods": [ - "list_documents" - ] - }, - "Listen": { - "methods": [ - "listen" - ] - }, - "PartitionQuery": { - "methods": [ - "partition_query" - ] - }, - "Rollback": { - "methods": [ - "rollback" - ] - }, - "RunAggregationQuery": { - "methods": [ - "run_aggregation_query" - ] - }, - "RunQuery": { - "methods": [ - "run_query" - ] - }, - "UpdateDocument": { - "methods": [ - "update_document" - ] - }, - "Write": { - "methods": [ - "write" - ] - } - } - }, - "grpc-async": { - "libraryClient": "FirestoreAsyncClient", - "rpcs": { - "BatchGetDocuments": { - "methods": [ - "batch_get_documents" - ] - }, - "BatchWrite": { - "methods": [ - "batch_write" - ] - }, - "BeginTransaction": { - "methods": [ - "begin_transaction" - ] - }, - "Commit": { - "methods": [ - "commit" - ] - }, - "CreateDocument": { - "methods": [ - "create_document" - ] - }, - "DeleteDocument": { - "methods": [ - "delete_document" - ] - }, - "GetDocument": { - "methods": [ - "get_document" - ] - }, - "ListCollectionIds": { - "methods": [ - "list_collection_ids" - ] - }, - "ListDocuments": { - "methods": [ - "list_documents" - ] - }, - "Listen": { - "methods": [ - "listen" - ] - }, - "PartitionQuery": { - "methods": [ - "partition_query" - ] - }, - "Rollback": { - "methods": [ - "rollback" - ] - }, - "RunAggregationQuery": { - "methods": [ - "run_aggregation_query" - ] - }, - "RunQuery": { - "methods": [ - "run_query" - ] - }, - "UpdateDocument": { - "methods": [ - "update_document" - ] - }, - "Write": { - "methods": [ - "write" - ] - } - } - }, - "rest": { - "libraryClient": "FirestoreClient", - "rpcs": { 
- "BatchGetDocuments": { - "methods": [ - "batch_get_documents" - ] - }, - "BatchWrite": { - "methods": [ - "batch_write" - ] - }, - "BeginTransaction": { - "methods": [ - "begin_transaction" - ] - }, - "Commit": { - "methods": [ - "commit" - ] - }, - "CreateDocument": { - "methods": [ - "create_document" - ] - }, - "DeleteDocument": { - "methods": [ - "delete_document" - ] - }, - "GetDocument": { - "methods": [ - "get_document" - ] - }, - "ListCollectionIds": { - "methods": [ - "list_collection_ids" - ] - }, - "ListDocuments": { - "methods": [ - "list_documents" - ] - }, - "Listen": { - "methods": [ - "listen" - ] - }, - "PartitionQuery": { - "methods": [ - "partition_query" - ] - }, - "Rollback": { - "methods": [ - "rollback" - ] - }, - "RunAggregationQuery": { - "methods": [ - "run_aggregation_query" - ] - }, - "RunQuery": { - "methods": [ - "run_query" - ] - }, - "UpdateDocument": { - "methods": [ - "update_document" - ] - }, - "Write": { - "methods": [ - "write" - ] - } - } - } - } - } - } -} diff --git a/google/cloud/firestore_v1/gapic_version.py b/google/cloud/firestore_v1/gapic_version.py deleted file mode 100644 index b5f2eaf6ce..0000000000 --- a/google/cloud/firestore_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__version__ = "2.21.0" # {x-release-please-version} diff --git a/google/cloud/firestore_v1/py.typed b/google/cloud/firestore_v1/py.typed deleted file mode 100644 index 35a48b3acc..0000000000 --- a/google/cloud/firestore_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-firestore package uses inline types. diff --git a/google/cloud/firestore_v1/services/__init__.py b/google/cloud/firestore_v1/services/__init__.py deleted file mode 100644 index cbf94b283c..0000000000 --- a/google/cloud/firestore_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/google/cloud/firestore_v1/services/firestore/__init__.py b/google/cloud/firestore_v1/services/firestore/__init__.py deleted file mode 100644 index a69a11b29e..0000000000 --- a/google/cloud/firestore_v1/services/firestore/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import FirestoreClient -from .async_client import FirestoreAsyncClient - -__all__ = ( - "FirestoreClient", - "FirestoreAsyncClient", -) diff --git a/google/cloud/firestore_v1/services/firestore/async_client.py b/google/cloud/firestore_v1/services/firestore/async_client.py deleted file mode 100644 index b904229b04..0000000000 --- a/google/cloud/firestore_v1/services/firestore/async_client.py +++ /dev/null @@ -1,2206 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging as std_logging -from collections import OrderedDict -import re -from typing import ( - Dict, - Callable, - Mapping, - MutableMapping, - MutableSequence, - Optional, - AsyncIterable, - Awaitable, - AsyncIterator, - Sequence, - Tuple, - Type, - Union, -) - -from google.cloud.firestore_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.cloud.firestore_v1.services.firestore import pagers -from google.cloud.firestore_v1.types import aggregation_result -from google.cloud.firestore_v1.types import common -from google.cloud.firestore_v1.types import document -from google.cloud.firestore_v1.types import document as gf_document -from google.cloud.firestore_v1.types import firestore -from google.cloud.firestore_v1.types import query -from google.cloud.firestore_v1.types import query_profile -from google.cloud.firestore_v1.types import write as gf_write -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from .transports.base import FirestoreTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import FirestoreGrpcAsyncIOTransport -from .client import FirestoreClient - -try: - from google.api_core import client_logging # type: ignore - - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except 
ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class FirestoreAsyncClient: - """The Cloud Firestore service. - - Cloud Firestore is a fast, fully managed, serverless, - cloud-native NoSQL document database that simplifies storing, - syncing, and querying data for your mobile, web, and IoT apps at - global scale. Its client libraries provide live synchronization - and offline support, while its security features and - integrations with Firebase and Google Cloud Platform accelerate - building truly serverless apps. - """ - - _client: FirestoreClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = FirestoreClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = FirestoreClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = FirestoreClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = FirestoreClient._DEFAULT_UNIVERSE - - common_billing_account_path = staticmethod( - FirestoreClient.common_billing_account_path - ) - parse_common_billing_account_path = staticmethod( - FirestoreClient.parse_common_billing_account_path - ) - common_folder_path = staticmethod(FirestoreClient.common_folder_path) - parse_common_folder_path = staticmethod(FirestoreClient.parse_common_folder_path) - common_organization_path = staticmethod(FirestoreClient.common_organization_path) - parse_common_organization_path = staticmethod( - FirestoreClient.parse_common_organization_path - ) - common_project_path = staticmethod(FirestoreClient.common_project_path) - parse_common_project_path = staticmethod(FirestoreClient.parse_common_project_path) - common_location_path = staticmethod(FirestoreClient.common_location_path) - parse_common_location_path = staticmethod( - FirestoreClient.parse_common_location_path - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an 
instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - FirestoreAsyncClient: The constructed client. - """ - return FirestoreClient.from_service_account_info.__func__(FirestoreAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - FirestoreAsyncClient: The constructed client. - """ - return FirestoreClient.from_service_account_file.__func__(FirestoreAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source( - cls, client_options: Optional[ClientOptions] = None - ): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. 
- (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return FirestoreClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> FirestoreTransport: - """Returns the transport used by the client instance. - - Returns: - FirestoreTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = FirestoreClient.get_transport_class - - def __init__( - self, - *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[ - Union[str, FirestoreTransport, Callable[..., FirestoreTransport]] - ] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the firestore async client. 
- - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,FirestoreTransport,Callable[..., FirestoreTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the FirestoreTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. 
- - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = FirestoreClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - std_logging.DEBUG - ): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.firestore_v1.FirestoreAsyncClient`.", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.firestore.v1.Firestore", - "credentialsType": None, - }, - ) - - async def get_document( - self, - request: Optional[Union[firestore.GetDocumentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> document.Document: - r"""Gets a single document. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_get_document(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.GetDocumentRequest( - transaction=b'transaction_blob', - name="name_value", - ) - - # Make the request - response = await client.get_document(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_v1.types.GetDocumentRequest, dict]]): - The request object. The request for - [Firestore.GetDocument][google.firestore.v1.Firestore.GetDocument]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_v1.types.Document: - A Firestore document. - - Must not exceed 1 MiB - 4 bytes. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore.GetDocumentRequest): - request = firestore.GetDocumentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_document - ] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_documents( - self, - request: Optional[Union[firestore.ListDocumentsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDocumentsAsyncPager: - r"""Lists documents. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_list_documents(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.ListDocumentsRequest( - transaction=b'transaction_blob', - parent="parent_value", - ) - - # Make the request - page_result = client.list_documents(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_v1.types.ListDocumentsRequest, dict]]): - The request object. The request for - [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_v1.services.firestore.pagers.ListDocumentsAsyncPager: - The response for - [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore.ListDocumentsRequest): - request = firestore.ListDocumentsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.list_documents - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - ( - ("parent", request.parent), - ("collection_id", request.collection_id), - ) - ), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDocumentsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def update_document( - self, - request: Optional[Union[firestore.UpdateDocumentRequest, dict]] = None, - *, - document: Optional[gf_document.Document] = None, - update_mask: Optional[common.DocumentMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gf_document.Document: - r"""Updates or inserts a document. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_update_document(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.UpdateDocumentRequest( - ) - - # Make the request - response = await client.update_document(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_v1.types.UpdateDocumentRequest, dict]]): - The request object. The request for - [Firestore.UpdateDocument][google.firestore.v1.Firestore.UpdateDocument]. - document (:class:`google.cloud.firestore_v1.types.Document`): - Required. The updated document. - Creates the document if it does not - already exist. - - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.cloud.firestore_v1.types.DocumentMask`): - The fields to update. - None of the field paths in the mask may - contain a reserved name. 
- - If the document exists on the server and - has fields not referenced in the mask, - they are left unchanged. - Fields referenced in the mask, but not - present in the input document, are - deleted from the document on the server. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_v1.types.Document: - A Firestore document. - - Must not exceed 1 MiB - 4 bytes. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [document, update_mask] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore.UpdateDocumentRequest): - request = firestore.UpdateDocumentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if document is not None: - request.document = document - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[ - self._client._transport.update_document - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("document.name", request.document.name),) - ), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_document( - self, - request: Optional[Union[firestore.DeleteDocumentRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a document. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_delete_document(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.DeleteDocumentRequest( - name="name_value", - ) - - # Make the request - await client.delete_document(request=request) - - Args: - request (Optional[Union[google.cloud.firestore_v1.types.DeleteDocumentRequest, dict]]): - The request object. The request for - [Firestore.DeleteDocument][google.firestore.v1.Firestore.DeleteDocument]. - name (:class:`str`): - Required. The resource name of the Document to delete. 
- In the format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore.DeleteDocumentRequest): - request = firestore.DeleteDocumentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.delete_document - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. 
- self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def batch_get_documents( - self, - request: Optional[Union[firestore.BatchGetDocumentsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> Awaitable[AsyncIterable[firestore.BatchGetDocumentsResponse]]: - r"""Gets multiple documents. - - Documents returned by this method are not guaranteed to - be returned in the same order that they were requested. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_batch_get_documents(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.BatchGetDocumentsRequest( - transaction=b'transaction_blob', - database="database_value", - ) - - # Make the request - stream = await client.batch_get_documents(request=request) - - # Handle the response - async for response in stream: - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_v1.types.BatchGetDocumentsRequest, dict]]): - The request object. The request for - [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - AsyncIterable[google.cloud.firestore_v1.types.BatchGetDocumentsResponse]: - The streamed response for - [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments]. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore.BatchGetDocumentsRequest): - request = firestore.BatchGetDocumentsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.batch_get_documents - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("database", request.database),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def begin_transaction( - self, - request: Optional[Union[firestore.BeginTransactionRequest, dict]] = None, - *, - database: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> firestore.BeginTransactionResponse: - r"""Starts a new transaction. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_begin_transaction(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.BeginTransactionRequest( - database="database_value", - ) - - # Make the request - response = await client.begin_transaction(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_v1.types.BeginTransactionRequest, dict]]): - The request object. The request for - [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction]. - database (:class:`str`): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_v1.types.BeginTransactionResponse: - The response for - [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [database] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore.BeginTransactionRequest): - request = firestore.BeginTransactionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.begin_transaction - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("database", request.database),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def commit( - self, - request: Optional[Union[firestore.CommitRequest, dict]] = None, - *, - database: Optional[str] = None, - writes: Optional[MutableSequence[gf_write.Write]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> firestore.CommitResponse: - r"""Commits a transaction, while optionally updating - documents. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_commit(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.CommitRequest( - database="database_value", - ) - - # Make the request - response = await client.commit(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_v1.types.CommitRequest, dict]]): - The request object. The request for - [Firestore.Commit][google.firestore.v1.Firestore.Commit]. - database (:class:`str`): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - writes (:class:`MutableSequence[google.cloud.firestore_v1.types.Write]`): - The writes to apply. - - Always executed atomically and in order. - - This corresponds to the ``writes`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_v1.types.CommitResponse: - The response for - [Firestore.Commit][google.firestore.v1.Firestore.Commit]. 
- - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database, writes] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore.CommitRequest): - request = firestore.CommitRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if writes: - request.writes.extend(writes) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.commit] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("database", request.database),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def rollback( - self, - request: Optional[Union[firestore.RollbackRequest, dict]] = None, - *, - database: Optional[str] = None, - transaction: Optional[bytes] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Rolls back a transaction. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_rollback(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.RollbackRequest( - database="database_value", - transaction=b'transaction_blob', - ) - - # Make the request - await client.rollback(request=request) - - Args: - request (Optional[Union[google.cloud.firestore_v1.types.RollbackRequest, dict]]): - The request object. The request for - [Firestore.Rollback][google.firestore.v1.Firestore.Rollback]. - database (:class:`str`): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - transaction (:class:`bytes`): - Required. The transaction to roll - back. - - This corresponds to the ``transaction`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database, transaction] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore.RollbackRequest): - request = firestore.RollbackRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if transaction is not None: - request.transaction = transaction - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.rollback] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("database", request.database),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def run_query( - self, - request: Optional[Union[firestore.RunQueryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> Awaitable[AsyncIterable[firestore.RunQueryResponse]]: - r"""Runs a query. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_run_query(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.RunQueryRequest( - transaction=b'transaction_blob', - parent="parent_value", - ) - - # Make the request - stream = await client.run_query(request=request) - - # Handle the response - async for response in stream: - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_v1.types.RunQueryRequest, dict]]): - The request object. The request for - [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - AsyncIterable[google.cloud.firestore_v1.types.RunQueryResponse]: - The response for - [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery]. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore.RunQueryRequest): - request = firestore.RunQueryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[ - self._client._transport.run_query - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def run_aggregation_query( - self, - request: Optional[Union[firestore.RunAggregationQueryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> Awaitable[AsyncIterable[firestore.RunAggregationQueryResponse]]: - r"""Runs an aggregation query. - - Rather than producing [Document][google.firestore.v1.Document] - results like - [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery], - this API allows running an aggregation to produce a series of - [AggregationResult][google.firestore.v1.AggregationResult] - server-side. - - High-Level Example: - - :: - - -- Return the number of documents in table given a filter. - SELECT COUNT(*) FROM ( SELECT * FROM k where a = true ); - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_run_aggregation_query(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.RunAggregationQueryRequest( - transaction=b'transaction_blob', - parent="parent_value", - ) - - # Make the request - stream = await client.run_aggregation_query(request=request) - - # Handle the response - async for response in stream: - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_v1.types.RunAggregationQueryRequest, dict]]): - The request object. The request for - [Firestore.RunAggregationQuery][google.firestore.v1.Firestore.RunAggregationQuery]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - AsyncIterable[google.cloud.firestore_v1.types.RunAggregationQueryResponse]: - The response for - [Firestore.RunAggregationQuery][google.firestore.v1.Firestore.RunAggregationQuery]. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore.RunAggregationQueryRequest): - request = firestore.RunAggregationQueryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[ - self._client._transport.run_aggregation_query - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def partition_query( - self, - request: Optional[Union[firestore.PartitionQueryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.PartitionQueryAsyncPager: - r"""Partitions a query by returning partition cursors - that can be used to run the query in parallel. The - returned partition cursors are split points that can be - used by RunQuery as starting/end points for the query - results. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_partition_query(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.PartitionQueryRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.partition_query(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_v1.types.PartitionQueryRequest, dict]]): - The request object. The request for - [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_v1.services.firestore.pagers.PartitionQueryAsyncPager: - The response for - [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore.PartitionQueryRequest): - request = firestore.PartitionQueryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[ - self._client._transport.partition_query - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.PartitionQueryAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def write( - self, - requests: Optional[AsyncIterator[firestore.WriteRequest]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> Awaitable[AsyncIterable[firestore.WriteResponse]]: - r"""Streams batches of document updates and deletes, in - order. This method is only available via gRPC or - WebChannel (not REST). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_write(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.WriteRequest( - database="database_value", - ) - - # This method expects an iterator which contains - # 'firestore_v1.WriteRequest' objects - # Here we create a generator that yields a single `request` for - # demonstrative purposes. - requests = [request] - - def request_generator(): - for request in requests: - yield request - - # Make the request - stream = await client.write(requests=request_generator()) - - # Handle the response - async for response in stream: - print(response) - - Args: - requests (AsyncIterator[`google.cloud.firestore_v1.types.WriteRequest`]): - The request object AsyncIterator. The request for - [Firestore.Write][google.firestore.v1.Firestore.Write]. - - The first request creates a stream, or resumes an - existing one from a token. - - When creating a new stream, the server replies with a - response containing only an ID and a token, to use in - the next request. - - When resuming a stream, the server first streams any - responses later than the given token, then a response - containing only an up-to-date token, to use in the next - request. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - AsyncIterable[google.cloud.firestore_v1.types.WriteResponse]: - The response for - [Firestore.Write][google.firestore.v1.Firestore.Write]. - - """ - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.write] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata(()),) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = rpc( - requests, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def listen( - self, - requests: Optional[AsyncIterator[firestore.ListenRequest]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> Awaitable[AsyncIterable[firestore.ListenResponse]]: - r"""Listens to changes. This method is only available via - gRPC or WebChannel (not REST). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_listen(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - add_target = firestore_v1.Target() - add_target.resume_token = b'resume_token_blob' - - request = firestore_v1.ListenRequest( - add_target=add_target, - database="database_value", - ) - - # This method expects an iterator which contains - # 'firestore_v1.ListenRequest' objects - # Here we create a generator that yields a single `request` for - # demonstrative purposes. - requests = [request] - - def request_generator(): - for request in requests: - yield request - - # Make the request - stream = await client.listen(requests=request_generator()) - - # Handle the response - async for response in stream: - print(response) - - Args: - requests (AsyncIterator[`google.cloud.firestore_v1.types.ListenRequest`]): - The request object AsyncIterator. A request for - [Firestore.Listen][google.firestore.v1.Firestore.Listen] - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - AsyncIterable[google.cloud.firestore_v1.types.ListenResponse]: - The response for - [Firestore.Listen][google.firestore.v1.Firestore.Listen]. - - """ - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.listen] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata(()),) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = rpc( - requests, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_collection_ids( - self, - request: Optional[Union[firestore.ListCollectionIdsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListCollectionIdsAsyncPager: - r"""Lists all the collection IDs underneath a document. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_list_collection_ids(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.ListCollectionIdsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_collection_ids(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_v1.types.ListCollectionIdsRequest, dict]]): - The request object. The request for - [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds]. 
- parent (:class:`str`): - Required. The parent document. In the format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - For example: - ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_v1.services.firestore.pagers.ListCollectionIdsAsyncPager: - The response from - [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore.ListCollectionIdsRequest): - request = firestore.ListCollectionIdsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.list_collection_ids - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListCollectionIdsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def batch_write( - self, - request: Optional[Union[firestore.BatchWriteRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> firestore.BatchWriteResponse: - r"""Applies a batch of write operations. - - The BatchWrite method does not apply the write operations - atomically and can apply them out of order. Method does not - allow more than one write per document. Each write succeeds or - fails independently. See the - [BatchWriteResponse][google.firestore.v1.BatchWriteResponse] for - the success status of each write. - - If you require an atomically applied set of writes, use - [Commit][google.firestore.v1.Firestore.Commit] instead. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_batch_write(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.BatchWriteRequest( - database="database_value", - ) - - # Make the request - response = await client.batch_write(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_v1.types.BatchWriteRequest, dict]]): - The request object. The request for - [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_v1.types.BatchWriteResponse: - The response from - [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite]. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore.BatchWriteRequest): - request = firestore.BatchWriteRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[ - self._client._transport.batch_write - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("database", request.database),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_document( - self, - request: Optional[Union[firestore.CreateDocumentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> document.Document: - r"""Creates a new document. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - async def sample_create_document(): - # Create a client - client = firestore_v1.FirestoreAsyncClient() - - # Initialize request argument(s) - request = firestore_v1.CreateDocumentRequest( - parent="parent_value", - collection_id="collection_id_value", - ) - - # Make the request - response = await client.create_document(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.firestore_v1.types.CreateDocumentRequest, dict]]): - The request object. The request for - [Firestore.CreateDocument][google.firestore.v1.Firestore.CreateDocument]. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_v1.types.Document: - A Firestore document. - - Must not exceed 1 MiB - 4 bytes. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore.CreateDocumentRequest): - request = firestore.CreateDocumentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.create_document - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - ( - ("parent", request.parent), - ("collection_id", request.collection_id), - ) - ), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. 
- - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. 
- - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. 
- If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. 
If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def __aenter__(self) -> "FirestoreAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=package_version.__version__ -) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -__all__ = ("FirestoreAsyncClient",) diff --git a/google/cloud/firestore_v1/services/firestore/client.py b/google/cloud/firestore_v1/services/firestore/client.py deleted file mode 100644 index 8055612429..0000000000 --- a/google/cloud/firestore_v1/services/firestore/client.py +++ /dev/null @@ -1,2592 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import ( - Dict, - Callable, - Mapping, - MutableMapping, - MutableSequence, - Optional, - Iterable, - Iterator, - Sequence, - Tuple, - Type, - Union, - cast, -) -import warnings - -from google.cloud.firestore_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.cloud.firestore_v1.services.firestore import pagers -from google.cloud.firestore_v1.types import aggregation_result -from google.cloud.firestore_v1.types import common -from google.cloud.firestore_v1.types import document -from google.cloud.firestore_v1.types import document as gf_document -from google.cloud.firestore_v1.types import firestore -from google.cloud.firestore_v1.types import query -from google.cloud.firestore_v1.types import query_profile -from google.cloud.firestore_v1.types import write as gf_write -from google.cloud.location import locations_pb2 # type: ignore 
-from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from .transports.base import FirestoreTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import FirestoreGrpcTransport -from .transports.grpc_asyncio import FirestoreGrpcAsyncIOTransport -from .transports.rest import FirestoreRestTransport - - -class FirestoreClientMeta(type): - """Metaclass for the Firestore client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - - _transport_registry = OrderedDict() # type: Dict[str, Type[FirestoreTransport]] - _transport_registry["grpc"] = FirestoreGrpcTransport - _transport_registry["grpc_asyncio"] = FirestoreGrpcAsyncIOTransport - _transport_registry["rest"] = FirestoreRestTransport - - def get_transport_class( - cls, - label: Optional[str] = None, - ) -> Type[FirestoreTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class FirestoreClient(metaclass=FirestoreClientMeta): - """The Cloud Firestore service. - - Cloud Firestore is a fast, fully managed, serverless, - cloud-native NoSQL document database that simplifies storing, - syncing, and querying data for your mobile, web, and IoT apps at - global scale. 
 Its client libraries provide live synchronization
-    and offline support, while its security features and
-    integrations with Firebase and Google Cloud Platform accelerate
-    building truly serverless apps.
-    """
-
-    @staticmethod
-    def _get_default_mtls_endpoint(api_endpoint):
-        """Converts api endpoint to mTLS endpoint.
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
-
-        m = mtls_endpoint_re.match(api_endpoint)
-        name, mtls, sandbox, googledomain = m.groups()
-        if mtls or not googledomain:
-            return api_endpoint
-
-        if sandbox:
-            return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
-            )
-
-        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
-    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
-    DEFAULT_ENDPOINT = "firestore.googleapis.com"
-    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
-        DEFAULT_ENDPOINT
-    )
-
-    _DEFAULT_ENDPOINT_TEMPLATE = "firestore.{UNIVERSE_DOMAIN}"
-    _DEFAULT_UNIVERSE = "googleapis.com"
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            FirestoreClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_info(info)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            FirestoreClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_file(filename)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    from_service_account_json = from_service_account_file
-
-    @property
-    def transport(self) -> FirestoreTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            FirestoreTransport: The transport used by the client
-                instance.
-        """
-        return self._transport
-
-    @staticmethod
-    def common_billing_account_path(
-        billing_account: str,
-    ) -> str:
-        """Returns a fully-qualified billing_account string."""
-        return "billingAccounts/{billing_account}".format(
-            billing_account=billing_account,
-        )
-
-    @staticmethod
-    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
-        """Parse a billing_account path into its component segments."""
-        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_folder_path(
-        folder: str,
-    ) -> str:
-        """Returns a fully-qualified folder string."""
-        return "folders/{folder}".format(
-            folder=folder,
-        )
-
-    @staticmethod
-    def parse_common_folder_path(path: str) -> Dict[str, str]:
-        """Parse a folder path into its component segments."""
-        m = re.match(r"^folders/(?P<folder>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_organization_path(
-        organization: str,
-    ) -> str:
-        """Returns a fully-qualified
 organization string."""
-        return "organizations/{organization}".format(
-            organization=organization,
-        )
-
-    @staticmethod
-    def parse_common_organization_path(path: str) -> Dict[str, str]:
-        """Parse a organization path into its component segments."""
-        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_project_path(
-        project: str,
-    ) -> str:
-        """Returns a fully-qualified project string."""
-        return "projects/{project}".format(
-            project=project,
-        )
-
-    @staticmethod
-    def parse_common_project_path(path: str) -> Dict[str, str]:
-        """Parse a project path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_location_path(
-        project: str,
-        location: str,
-    ) -> str:
-        """Returns a fully-qualified location string."""
-        return "projects/{project}/locations/{location}".format(
-            project=project,
-            location=location,
-        )
-
-    @staticmethod
-    def parse_common_location_path(path: str) -> Dict[str, str]:
-        """Parse a location path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(
-        cls, client_options: Optional[client_options_lib.ClientOptions] = None
-    ):
-        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` if provided, use the provided one.
- (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn( - "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning, - ) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError( - "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - ) - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. 
- if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or ( - use_mtls_endpoint == "auto" and client_cert_source - ): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv( - "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" - ).lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError( - "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - ) - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. 
- """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint( - api_override, client_cert_source, universe_domain, use_mtls_endpoint - ): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or ( - use_mtls_endpoint == "auto" and client_cert_source - ): - _default_universe = FirestoreClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError( - f"mTLS is not supported in any universe other than {_default_universe}." - ) - api_endpoint = FirestoreClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = FirestoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=universe_domain - ) - return api_endpoint - - @staticmethod - def _get_universe_domain( - client_universe_domain: Optional[str], universe_domain_env: Optional[str] - ) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. 
- - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = FirestoreClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [ - HTTPStatus.UNAUTHORIZED, - HTTPStatus.FORBIDDEN, - HTTPStatus.NOT_FOUND, - ]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. 
- - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__( - self, - *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[ - Union[str, FirestoreTransport, Callable[..., FirestoreTransport]] - ] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the firestore client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,FirestoreTransport,Callable[..., FirestoreTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the FirestoreTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast( - client_options_lib.ClientOptions, self._client_options - ) - - universe_domain_opt = getattr(self._client_options, "universe_domain", None) - - ( - self._use_client_cert, - self._use_mtls_endpoint, - self._universe_domain_env, - ) = FirestoreClient._read_environment_variables() - self._client_cert_source = FirestoreClient._get_client_cert_source( - self._client_options.client_cert_source, self._use_client_cert - ) - self._universe_domain = FirestoreClient._get_universe_domain( - universe_domain_opt, self._universe_domain_env - ) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. 
- self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError( - "client_options.api_key and credentials are mutually exclusive" - ) - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, FirestoreTransport) - if transport_provided: - # transport is a FirestoreTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError( - "When providing a transport instance, " - "provide its credentials directly." - ) - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = cast(FirestoreTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = self._api_endpoint or FirestoreClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint, - ) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr( - google.auth._default, "get_api_key_credentials" - ): - credentials = google.auth._default.get_api_key_credentials( - api_key_value - ) - - transport_init: Union[ - Type[FirestoreTransport], Callable[..., FirestoreTransport] - ] = ( - FirestoreClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., FirestoreTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - 
host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - std_logging.DEBUG - ): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.firestore_v1.FirestoreClient`.", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.firestore.v1.Firestore", - "credentialsType": None, - }, - ) - - def get_document( - self, - request: Optional[Union[firestore.GetDocumentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> document.Document: - r"""Gets a single document. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_get_document(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.GetDocumentRequest( - transaction=b'transaction_blob', - name="name_value", - ) - - # Make the request - response = client.get_document(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_v1.types.GetDocumentRequest, dict]): - The request object. The request for - [Firestore.GetDocument][google.firestore.v1.Firestore.GetDocument]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_v1.types.Document: - A Firestore document. - - Must not exceed 1 MiB - 4 bytes. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore.GetDocumentRequest): - request = firestore.GetDocumentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_document] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_documents( - self, - request: Optional[Union[firestore.ListDocumentsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDocumentsPager: - r"""Lists documents. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_list_documents(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.ListDocumentsRequest( - transaction=b'transaction_blob', - parent="parent_value", - ) - - # Make the request - page_result = client.list_documents(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.firestore_v1.types.ListDocumentsRequest, dict]): - The request object. The request for - [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_v1.services.firestore.pagers.ListDocumentsPager: - The response for - [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore.ListDocumentsRequest): - request = firestore.ListDocumentsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_documents] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - ( - ("parent", request.parent), - ("collection_id", request.collection_id), - ) - ), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDocumentsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def update_document( - self, - request: Optional[Union[firestore.UpdateDocumentRequest, dict]] = None, - *, - document: Optional[gf_document.Document] = None, - update_mask: Optional[common.DocumentMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gf_document.Document: - r"""Updates or inserts a document. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_update_document(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.UpdateDocumentRequest( - ) - - # Make the request - response = client.update_document(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_v1.types.UpdateDocumentRequest, dict]): - The request object. The request for - [Firestore.UpdateDocument][google.firestore.v1.Firestore.UpdateDocument]. - document (google.cloud.firestore_v1.types.Document): - Required. The updated document. - Creates the document if it does not - already exist. - - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.cloud.firestore_v1.types.DocumentMask): - The fields to update. - None of the field paths in the mask may - contain a reserved name. - - If the document exists on the server and - has fields not referenced in the mask, - they are left unchanged. 
- Fields referenced in the mask, but not - present in the input document, are - deleted from the document on the server. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_v1.types.Document: - A Firestore document. - - Must not exceed 1 MiB - 4 bytes. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [document, update_mask] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore.UpdateDocumentRequest): - request = firestore.UpdateDocumentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if document is not None: - request.document = document - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.update_document] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("document.name", request.document.name),) - ), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_document( - self, - request: Optional[Union[firestore.DeleteDocumentRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a document. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_delete_document(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.DeleteDocumentRequest( - name="name_value", - ) - - # Make the request - client.delete_document(request=request) - - Args: - request (Union[google.cloud.firestore_v1.types.DeleteDocumentRequest, dict]): - The request object. The request for - [Firestore.DeleteDocument][google.firestore.v1.Firestore.DeleteDocument]. - name (str): - Required. The resource name of the Document to delete. - In the format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. 
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore.DeleteDocumentRequest): - request = firestore.DeleteDocumentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_document] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def batch_get_documents( - self, - request: Optional[Union[firestore.BatchGetDocumentsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> Iterable[firestore.BatchGetDocumentsResponse]: - r"""Gets multiple documents. - - Documents returned by this method are not guaranteed to - be returned in the same order that they were requested. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_batch_get_documents(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.BatchGetDocumentsRequest( - transaction=b'transaction_blob', - database="database_value", - ) - - # Make the request - stream = client.batch_get_documents(request=request) - - # Handle the response - for response in stream: - print(response) - - Args: - request (Union[google.cloud.firestore_v1.types.BatchGetDocumentsRequest, dict]): - The request object. The request for - [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - Iterable[google.cloud.firestore_v1.types.BatchGetDocumentsResponse]: - The streamed response for - [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments]. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore.BatchGetDocumentsRequest): - request = firestore.BatchGetDocumentsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.batch_get_documents] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("database", request.database),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def begin_transaction( - self, - request: Optional[Union[firestore.BeginTransactionRequest, dict]] = None, - *, - database: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> firestore.BeginTransactionResponse: - r"""Starts a new transaction. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_begin_transaction(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.BeginTransactionRequest( - database="database_value", - ) - - # Make the request - response = client.begin_transaction(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_v1.types.BeginTransactionRequest, dict]): - The request object. The request for - [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction]. - database (str): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_v1.types.BeginTransactionResponse: - The response for - [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [database] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore.BeginTransactionRequest): - request = firestore.BeginTransactionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.begin_transaction] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("database", request.database),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def commit( - self, - request: Optional[Union[firestore.CommitRequest, dict]] = None, - *, - database: Optional[str] = None, - writes: Optional[MutableSequence[gf_write.Write]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> firestore.CommitResponse: - r"""Commits a transaction, while optionally updating - documents. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_commit(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.CommitRequest( - database="database_value", - ) - - # Make the request - response = client.commit(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_v1.types.CommitRequest, dict]): - The request object. The request for - [Firestore.Commit][google.firestore.v1.Firestore.Commit]. - database (str): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - writes (MutableSequence[google.cloud.firestore_v1.types.Write]): - The writes to apply. - - Always executed atomically and in order. - - This corresponds to the ``writes`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_v1.types.CommitResponse: - The response for - [Firestore.Commit][google.firestore.v1.Firestore.Commit]. - - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database, writes] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore.CommitRequest): - request = firestore.CommitRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if writes is not None: - request.writes = writes - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.commit] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("database", request.database),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def rollback( - self, - request: Optional[Union[firestore.RollbackRequest, dict]] = None, - *, - database: Optional[str] = None, - transaction: Optional[bytes] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Rolls back a transaction. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_rollback(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.RollbackRequest( - database="database_value", - transaction=b'transaction_blob', - ) - - # Make the request - client.rollback(request=request) - - Args: - request (Union[google.cloud.firestore_v1.types.RollbackRequest, dict]): - The request object. The request for - [Firestore.Rollback][google.firestore.v1.Firestore.Rollback]. - database (str): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - transaction (bytes): - Required. The transaction to roll - back. - - This corresponds to the ``transaction`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [database, transaction] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore.RollbackRequest): - request = firestore.RollbackRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if transaction is not None: - request.transaction = transaction - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.rollback] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("database", request.database),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def run_query( - self, - request: Optional[Union[firestore.RunQueryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> Iterable[firestore.RunQueryResponse]: - r"""Runs a query. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_run_query(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.RunQueryRequest( - transaction=b'transaction_blob', - parent="parent_value", - ) - - # Make the request - stream = client.run_query(request=request) - - # Handle the response - for response in stream: - print(response) - - Args: - request (Union[google.cloud.firestore_v1.types.RunQueryRequest, dict]): - The request object. The request for - [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - Iterable[google.cloud.firestore_v1.types.RunQueryResponse]: - The response for - [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery]. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore.RunQueryRequest): - request = firestore.RunQueryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.run_query] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def run_aggregation_query( - self, - request: Optional[Union[firestore.RunAggregationQueryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> Iterable[firestore.RunAggregationQueryResponse]: - r"""Runs an aggregation query. - - Rather than producing [Document][google.firestore.v1.Document] - results like - [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery], - this API allows running an aggregation to produce a series of - [AggregationResult][google.firestore.v1.AggregationResult] - server-side. - - High-Level Example: - - :: - - -- Return the number of documents in table given a filter. - SELECT COUNT(*) FROM ( SELECT * FROM k where a = true ); - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_run_aggregation_query(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.RunAggregationQueryRequest( - transaction=b'transaction_blob', - parent="parent_value", - ) - - # Make the request - stream = client.run_aggregation_query(request=request) - - # Handle the response - for response in stream: - print(response) - - Args: - request (Union[google.cloud.firestore_v1.types.RunAggregationQueryRequest, dict]): - The request object. The request for - [Firestore.RunAggregationQuery][google.firestore.v1.Firestore.RunAggregationQuery]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - Iterable[google.cloud.firestore_v1.types.RunAggregationQueryResponse]: - The response for - [Firestore.RunAggregationQuery][google.firestore.v1.Firestore.RunAggregationQuery]. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore.RunAggregationQueryRequest): - request = firestore.RunAggregationQueryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.run_aggregation_query] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def partition_query( - self, - request: Optional[Union[firestore.PartitionQueryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.PartitionQueryPager: - r"""Partitions a query by returning partition cursors - that can be used to run the query in parallel. The - returned partition cursors are split points that can be - used by RunQuery as starting/end points for the query - results. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_partition_query(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.PartitionQueryRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.partition_query(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.firestore_v1.types.PartitionQueryRequest, dict]): - The request object. The request for - [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_v1.services.firestore.pagers.PartitionQueryPager: - The response for - [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore.PartitionQueryRequest): - request = firestore.PartitionQueryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.partition_query] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.PartitionQueryPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def write( - self, - requests: Optional[Iterator[firestore.WriteRequest]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> Iterable[firestore.WriteResponse]: - r"""Streams batches of document updates and deletes, in - order. This method is only available via gRPC or - WebChannel (not REST). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_write(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.WriteRequest( - database="database_value", - ) - - # This method expects an iterator which contains - # 'firestore_v1.WriteRequest' objects - # Here we create a generator that yields a single `request` for - # demonstrative purposes. - requests = [request] - - def request_generator(): - for request in requests: - yield request - - # Make the request - stream = client.write(requests=request_generator()) - - # Handle the response - for response in stream: - print(response) - - Args: - requests (Iterator[google.cloud.firestore_v1.types.WriteRequest]): - The request object iterator. The request for - [Firestore.Write][google.firestore.v1.Firestore.Write]. - - The first request creates a stream, or resumes an - existing one from a token. - - When creating a new stream, the server replies with a - response containing only an ID and a token, to use in - the next request. - - When resuming a stream, the server first streams any - responses later than the given token, then a response - containing only an up-to-date token, to use in the next - request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - Iterable[google.cloud.firestore_v1.types.WriteResponse]: - The response for - [Firestore.Write][google.firestore.v1.Firestore.Write]. 
- - """ - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.write] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata(()),) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - requests, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def listen( - self, - requests: Optional[Iterator[firestore.ListenRequest]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> Iterable[firestore.ListenResponse]: - r"""Listens to changes. This method is only available via - gRPC or WebChannel (not REST). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_listen(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - add_target = firestore_v1.Target() - add_target.resume_token = b'resume_token_blob' - - request = firestore_v1.ListenRequest( - add_target=add_target, - database="database_value", - ) - - # This method expects an iterator which contains - # 'firestore_v1.ListenRequest' objects - # Here we create a generator that yields a single `request` for - # demonstrative purposes. 
- requests = [request] - - def request_generator(): - for request in requests: - yield request - - # Make the request - stream = client.listen(requests=request_generator()) - - # Handle the response - for response in stream: - print(response) - - Args: - requests (Iterator[google.cloud.firestore_v1.types.ListenRequest]): - The request object iterator. A request for - [Firestore.Listen][google.firestore.v1.Firestore.Listen] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - Iterable[google.cloud.firestore_v1.types.ListenResponse]: - The response for - [Firestore.Listen][google.firestore.v1.Firestore.Listen]. - - """ - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.listen] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata(()),) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - requests, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_collection_ids( - self, - request: Optional[Union[firestore.ListCollectionIdsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListCollectionIdsPager: - r"""Lists all the collection IDs underneath a document. 
- - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_list_collection_ids(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.ListCollectionIdsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_collection_ids(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.firestore_v1.types.ListCollectionIdsRequest, dict]): - The request object. The request for - [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds]. - parent (str): - Required. The parent document. In the format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - For example: - ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - google.cloud.firestore_v1.services.firestore.pagers.ListCollectionIdsPager: - The response from - [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore.ListCollectionIdsRequest): - request = firestore.ListCollectionIdsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_collection_ids] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. 
- response = pagers.ListCollectionIdsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def batch_write( - self, - request: Optional[Union[firestore.BatchWriteRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> firestore.BatchWriteResponse: - r"""Applies a batch of write operations. - - The BatchWrite method does not apply the write operations - atomically and can apply them out of order. Method does not - allow more than one write per document. Each write succeeds or - fails independently. See the - [BatchWriteResponse][google.firestore.v1.BatchWriteResponse] for - the success status of each write. - - If you require an atomically applied set of writes, use - [Commit][google.firestore.v1.Firestore.Commit] instead. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_batch_write(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.BatchWriteRequest( - database="database_value", - ) - - # Make the request - response = client.batch_write(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_v1.types.BatchWriteRequest, dict]): - The request object. The request for - [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite]. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_v1.types.BatchWriteResponse: - The response from - [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite]. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore.BatchWriteRequest): - request = firestore.BatchWriteRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.batch_write] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("database", request.database),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_document( - self, - request: Optional[Union[firestore.CreateDocumentRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> document.Document: - r"""Creates a new document. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import firestore_v1 - - def sample_create_document(): - # Create a client - client = firestore_v1.FirestoreClient() - - # Initialize request argument(s) - request = firestore_v1.CreateDocumentRequest( - parent="parent_value", - collection_id="collection_id_value", - ) - - # Make the request - response = client.create_document(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.firestore_v1.types.CreateDocumentRequest, dict]): - The request object. The request for - [Firestore.CreateDocument][google.firestore.v1.Firestore.CreateDocument]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.firestore_v1.types.Document: - A Firestore document. - - Must not exceed 1 MiB - 4 bytes. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, firestore.CreateDocumentRequest): - request = firestore.CreateDocumentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.create_document] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - ( - ("parent", request.parent), - ("collection_id", request.collection_id), - ) - ), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "FirestoreClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. 
- """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. 
- """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=package_version.__version__ -) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - -__all__ = ("FirestoreClient",) diff --git a/google/cloud/firestore_v1/services/firestore/pagers.py b/google/cloud/firestore_v1/services/firestore/pagers.py deleted file mode 100644 index be9e4b7142..0000000000 --- a/google/cloud/firestore_v1/services/firestore/pagers.py +++ /dev/null @@ -1,510 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import ( - Any, - AsyncIterator, - Awaitable, - Callable, - Sequence, - Tuple, - Optional, - Iterator, - Union, -) - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[ - retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None - ] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.firestore_v1.types import document -from google.cloud.firestore_v1.types import firestore -from google.cloud.firestore_v1.types import query - - -class ListDocumentsPager: - """A pager for iterating through ``list_documents`` requests. - - This class thinly wraps an initial - :class:`google.cloud.firestore_v1.types.ListDocumentsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``documents`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDocuments`` requests and continue to iterate - through the ``documents`` field on the - corresponding responses. - - All the usual :class:`google.cloud.firestore_v1.types.ListDocumentsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - - def __init__( - self, - method: Callable[..., firestore.ListDocumentsResponse], - request: firestore.ListDocumentsRequest, - response: firestore.ListDocumentsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = () - ): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.firestore_v1.types.ListDocumentsRequest): - The initial request object. - response (google.cloud.firestore_v1.types.ListDocumentsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = firestore.ListDocumentsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[firestore.ListDocumentsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __iter__(self) -> Iterator[document.Document]: - for page in self.pages: - yield from page.documents - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - -class ListDocumentsAsyncPager: - """A pager for iterating through ``list_documents`` requests. - - This class thinly wraps an initial - :class:`google.cloud.firestore_v1.types.ListDocumentsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``documents`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDocuments`` requests and continue to iterate - through the ``documents`` field on the - corresponding responses. - - All the usual :class:`google.cloud.firestore_v1.types.ListDocumentsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., Awaitable[firestore.ListDocumentsResponse]], - request: firestore.ListDocumentsRequest, - response: firestore.ListDocumentsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = () - ): - """Instantiates the pager. 
- - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.firestore_v1.types.ListDocumentsRequest): - The initial request object. - response (google.cloud.firestore_v1.types.ListDocumentsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = firestore.ListDocumentsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[firestore.ListDocumentsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[document.Document]: - async def async_generator(): - async for page in self.pages: - for response in page.documents: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - -class PartitionQueryPager: - """A pager for iterating through ``partition_query`` requests. - - This class thinly wraps an initial - :class:`google.cloud.firestore_v1.types.PartitionQueryResponse` object, and - provides an ``__iter__`` method to iterate through its - ``partitions`` field. 
- - If there are more pages, the ``__iter__`` method will make additional - ``PartitionQuery`` requests and continue to iterate - through the ``partitions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.firestore_v1.types.PartitionQueryResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., firestore.PartitionQueryResponse], - request: firestore.PartitionQueryRequest, - response: firestore.PartitionQueryResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = () - ): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.firestore_v1.types.PartitionQueryRequest): - The initial request object. - response (google.cloud.firestore_v1.types.PartitionQueryResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = firestore.PartitionQueryRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[firestore.PartitionQueryResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __iter__(self) -> Iterator[query.Cursor]: - for page in self.pages: - yield from page.partitions - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - -class PartitionQueryAsyncPager: - """A pager for iterating through ``partition_query`` requests. - - This class thinly wraps an initial - :class:`google.cloud.firestore_v1.types.PartitionQueryResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``partitions`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``PartitionQuery`` requests and continue to iterate - through the ``partitions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.firestore_v1.types.PartitionQueryResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., Awaitable[firestore.PartitionQueryResponse]], - request: firestore.PartitionQueryRequest, - response: firestore.PartitionQueryResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = () - ): - """Instantiates the pager. 
- - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.firestore_v1.types.PartitionQueryRequest): - The initial request object. - response (google.cloud.firestore_v1.types.PartitionQueryResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = firestore.PartitionQueryRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[firestore.PartitionQueryResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[query.Cursor]: - async def async_generator(): - async for page in self.pages: - for response in page.partitions: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - -class ListCollectionIdsPager: - """A pager for iterating through ``list_collection_ids`` requests. - - This class thinly wraps an initial - :class:`google.cloud.firestore_v1.types.ListCollectionIdsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``collection_ids`` field. 
- - If there are more pages, the ``__iter__`` method will make additional - ``ListCollectionIds`` requests and continue to iterate - through the ``collection_ids`` field on the - corresponding responses. - - All the usual :class:`google.cloud.firestore_v1.types.ListCollectionIdsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., firestore.ListCollectionIdsResponse], - request: firestore.ListCollectionIdsRequest, - response: firestore.ListCollectionIdsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = () - ): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.firestore_v1.types.ListCollectionIdsRequest): - The initial request object. - response (google.cloud.firestore_v1.types.ListCollectionIdsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = firestore.ListCollectionIdsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[firestore.ListCollectionIdsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __iter__(self) -> Iterator[str]: - for page in self.pages: - yield from page.collection_ids - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - -class ListCollectionIdsAsyncPager: - """A pager for iterating through ``list_collection_ids`` requests. - - This class thinly wraps an initial - :class:`google.cloud.firestore_v1.types.ListCollectionIdsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``collection_ids`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListCollectionIds`` requests and continue to iterate - through the ``collection_ids`` field on the - corresponding responses. - - All the usual :class:`google.cloud.firestore_v1.types.ListCollectionIdsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - - def __init__( - self, - method: Callable[..., Awaitable[firestore.ListCollectionIdsResponse]], - request: firestore.ListCollectionIdsRequest, - response: firestore.ListCollectionIdsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.firestore_v1.types.ListCollectionIdsRequest): - The initial request object. - response (google.cloud.firestore_v1.types.ListCollectionIdsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = firestore.ListCollectionIdsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[firestore.ListCollectionIdsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[str]: - async def async_generator(): - async for page in self.pages: - for response in page.collection_ids: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/cloud/firestore_v1/services/firestore/transports/README.rst b/google/cloud/firestore_v1/services/firestore/transports/README.rst deleted file mode 100644 index 1823b6773c..0000000000 --- a/google/cloud/firestore_v1/services/firestore/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`FirestoreTransport` is the ABC for all transports. -- public child `FirestoreGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `FirestoreGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseFirestoreRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `FirestoreRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). 
diff --git a/google/cloud/firestore_v1/services/firestore/transports/__init__.py b/google/cloud/firestore_v1/services/firestore/transports/__init__.py deleted file mode 100644 index f3ca95f79c..0000000000 --- a/google/cloud/firestore_v1/services/firestore/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import FirestoreTransport -from .grpc import FirestoreGrpcTransport -from .grpc_asyncio import FirestoreGrpcAsyncIOTransport -from .rest import FirestoreRestTransport -from .rest import FirestoreRestInterceptor - - -# Compile a registry of transports. 
-_transport_registry = OrderedDict() # type: Dict[str, Type[FirestoreTransport]] -_transport_registry["grpc"] = FirestoreGrpcTransport -_transport_registry["grpc_asyncio"] = FirestoreGrpcAsyncIOTransport -_transport_registry["rest"] = FirestoreRestTransport - -__all__ = ( - "FirestoreTransport", - "FirestoreGrpcTransport", - "FirestoreGrpcAsyncIOTransport", - "FirestoreRestTransport", - "FirestoreRestInterceptor", -) diff --git a/google/cloud/firestore_v1/services/firestore/transports/base.py b/google/cloud/firestore_v1/services/firestore/transports/base.py deleted file mode 100644 index 02d6c0bbca..0000000000 --- a/google/cloud/firestore_v1/services/firestore/transports/base.py +++ /dev/null @@ -1,627 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.firestore_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -from google.cloud.firestore_v1.types import document -from google.cloud.firestore_v1.types import document as gf_document -from google.cloud.firestore_v1.types import firestore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=package_version.__version__ -) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class FirestoreTransport(abc.ABC): - """Abstract transport class for Firestore.""" - - AUTH_SCOPES = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", - ) - - DEFAULT_HOST: str = "firestore.googleapis.com" - - def __init__( - self, - *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'firestore.googleapis.com'). 
- credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. This argument will be - removed in the next major version of this library. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs( - "'credentials_file' and 'credentials' are mutually exclusive" - ) - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, **scopes_kwargs, quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default( - **scopes_kwargs, quota_project_id=quota_project_id - ) - # Don't apply audience if the credentials file passed from user. 
- if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience( - api_audience if api_audience else host - ) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if ( - always_use_jwt_access - and isinstance(credentials, service_account.Credentials) - and hasattr(service_account.Credentials, "with_always_use_jwt_access") - ): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ":" not in host: - host += ":443" - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.get_document: gapic_v1.method.wrap_method( - self.get_document, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_documents: gapic_v1.method.wrap_method( - self.list_documents, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.update_document: gapic_v1.method.wrap_method( - self.update_document, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - 
default_timeout=60.0, - client_info=client_info, - ), - self.delete_document: gapic_v1.method.wrap_method( - self.delete_document, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.batch_get_documents: gapic_v1.method.wrap_method( - self.batch_get_documents, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.begin_transaction: gapic_v1.method.wrap_method( - self.begin_transaction, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.commit: gapic_v1.method.wrap_method( - self.commit, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.rollback: gapic_v1.method.wrap_method( - self.rollback, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - 
core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.run_query: gapic_v1.method.wrap_method( - self.run_query, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.run_aggregation_query: gapic_v1.method.wrap_method( - self.run_aggregation_query, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.partition_query: gapic_v1.method.wrap_method( - self.partition_query, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.write: gapic_v1.method.wrap_method( - self.write, - default_timeout=86400.0, - client_info=client_info, - ), - self.listen: gapic_v1.method.wrap_method( - self.listen, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=86400.0, - ), - default_timeout=86400.0, - client_info=client_info, - ), - self.list_collection_ids: 
gapic_v1.method.wrap_method( - self.list_collection_ids, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.batch_write: gapic_v1.method.wrap_method( - self.batch_write, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_document: gapic_v1.method.wrap_method( - self.create_document, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.cancel_operation: gapic_v1.method.wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: gapic_v1.method.wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: gapic_v1.method.wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: gapic_v1.method.wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def get_document( - self, - ) -> Callable[ - [firestore.GetDocumentRequest], - Union[document.Document, Awaitable[document.Document]], - ]: - raise NotImplementedError() - - @property - def list_documents( - self, - ) -> Callable[ - [firestore.ListDocumentsRequest], - Union[ - firestore.ListDocumentsResponse, Awaitable[firestore.ListDocumentsResponse] - ], - ]: - raise NotImplementedError() - - @property - def update_document( - self, - ) -> Callable[ - [firestore.UpdateDocumentRequest], - Union[gf_document.Document, Awaitable[gf_document.Document]], - ]: - raise NotImplementedError() - - @property - def delete_document( - self, - ) -> Callable[ - [firestore.DeleteDocumentRequest], - Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], - ]: - raise NotImplementedError() - - @property - def batch_get_documents( - self, - ) -> Callable[ - [firestore.BatchGetDocumentsRequest], - Union[ - firestore.BatchGetDocumentsResponse, - Awaitable[firestore.BatchGetDocumentsResponse], - ], - ]: - raise NotImplementedError() - - @property - def begin_transaction( - self, - ) -> Callable[ - [firestore.BeginTransactionRequest], - Union[ - firestore.BeginTransactionResponse, - Awaitable[firestore.BeginTransactionResponse], - ], - ]: - raise NotImplementedError() - - @property - def commit( - self, - ) -> Callable[ - [firestore.CommitRequest], - Union[firestore.CommitResponse, Awaitable[firestore.CommitResponse]], - ]: - raise NotImplementedError() - - @property - def rollback( - self, - ) -> Callable[ - [firestore.RollbackRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]] - ]: - raise NotImplementedError() - - @property - def run_query( - self, - ) -> Callable[ - [firestore.RunQueryRequest], - Union[firestore.RunQueryResponse, Awaitable[firestore.RunQueryResponse]], - ]: - raise NotImplementedError() - - @property - def run_aggregation_query( - self, - ) -> Callable[ - [firestore.RunAggregationQueryRequest], - Union[ - 
firestore.RunAggregationQueryResponse, - Awaitable[firestore.RunAggregationQueryResponse], - ], - ]: - raise NotImplementedError() - - @property - def partition_query( - self, - ) -> Callable[ - [firestore.PartitionQueryRequest], - Union[ - firestore.PartitionQueryResponse, - Awaitable[firestore.PartitionQueryResponse], - ], - ]: - raise NotImplementedError() - - @property - def write( - self, - ) -> Callable[ - [firestore.WriteRequest], - Union[firestore.WriteResponse, Awaitable[firestore.WriteResponse]], - ]: - raise NotImplementedError() - - @property - def listen( - self, - ) -> Callable[ - [firestore.ListenRequest], - Union[firestore.ListenResponse, Awaitable[firestore.ListenResponse]], - ]: - raise NotImplementedError() - - @property - def list_collection_ids( - self, - ) -> Callable[ - [firestore.ListCollectionIdsRequest], - Union[ - firestore.ListCollectionIdsResponse, - Awaitable[firestore.ListCollectionIdsResponse], - ], - ]: - raise NotImplementedError() - - @property - def batch_write( - self, - ) -> Callable[ - [firestore.BatchWriteRequest], - Union[firestore.BatchWriteResponse, Awaitable[firestore.BatchWriteResponse]], - ]: - raise NotImplementedError() - - @property - def create_document( - self, - ) -> Callable[ - [firestore.CreateDocumentRequest], - Union[document.Document, Awaitable[document.Document]], - ]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[ - operations_pb2.ListOperationsResponse, - Awaitable[operations_pb2.ListOperationsResponse], - ], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: - raise NotImplementedError() - - @property - def 
delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ("FirestoreTransport",) diff --git a/google/cloud/firestore_v1/services/firestore/transports/grpc.py b/google/cloud/firestore_v1/services/firestore/transports/grpc.py deleted file mode 100644 index 3c5bded2d3..0000000000 --- a/google/cloud/firestore_v1/services/firestore/transports/grpc.py +++ /dev/null @@ -1,867 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.firestore_v1.types import document -from google.cloud.firestore_v1.types import document as gf_document -from google.cloud.firestore_v1.types import firestore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import FirestoreTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - std_logging.DEBUG - ) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { 
- "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert gRPC metadata `` to list of tuples - metadata = ( - dict([(k, str(v)) for k, v in response_metadata]) - if response_metadata - else None - ) - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class FirestoreGrpcTransport(FirestoreTransport): - """gRPC backend transport for Firestore. - - The Cloud Firestore service. - - Cloud Firestore is a fast, fully managed, serverless, - cloud-native NoSQL document database that simplifies storing, - syncing, and querying data for your mobile, web, and IoT apps at - global scale. Its client libraries provide live synchronization - and offline support, while its security features and - integrations with Firebase and Google Cloud Platform accelerate - building truly serverless apps. 
- - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _stubs: Dict[str, Callable] - - def __init__( - self, - *, - host: str = "firestore.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'firestore.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - This argument will be removed in the next major version of this library. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. 
- channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. 
- google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the 
credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel( - self._grpc_channel, self._interceptor - ) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel( - cls, - host: str = "firestore.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs, - ) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. This argument will be - removed in the next major version of this library. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. 
- - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs, - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service.""" - return self._grpc_channel - - @property - def get_document( - self, - ) -> Callable[[firestore.GetDocumentRequest], document.Document]: - r"""Return a callable for the get document method over gRPC. - - Gets a single document. - - Returns: - Callable[[~.GetDocumentRequest], - ~.Document]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_document" not in self._stubs: - self._stubs["get_document"] = self._logged_channel.unary_unary( - "/google.firestore.v1.Firestore/GetDocument", - request_serializer=firestore.GetDocumentRequest.serialize, - response_deserializer=document.Document.deserialize, - ) - return self._stubs["get_document"] - - @property - def list_documents( - self, - ) -> Callable[[firestore.ListDocumentsRequest], firestore.ListDocumentsResponse]: - r"""Return a callable for the list documents method over gRPC. - - Lists documents. - - Returns: - Callable[[~.ListDocumentsRequest], - ~.ListDocumentsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "list_documents" not in self._stubs: - self._stubs["list_documents"] = self._logged_channel.unary_unary( - "/google.firestore.v1.Firestore/ListDocuments", - request_serializer=firestore.ListDocumentsRequest.serialize, - response_deserializer=firestore.ListDocumentsResponse.deserialize, - ) - return self._stubs["list_documents"] - - @property - def update_document( - self, - ) -> Callable[[firestore.UpdateDocumentRequest], gf_document.Document]: - r"""Return a callable for the update document method over gRPC. - - Updates or inserts a document. - - Returns: - Callable[[~.UpdateDocumentRequest], - ~.Document]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "update_document" not in self._stubs: - self._stubs["update_document"] = self._logged_channel.unary_unary( - "/google.firestore.v1.Firestore/UpdateDocument", - request_serializer=firestore.UpdateDocumentRequest.serialize, - response_deserializer=gf_document.Document.deserialize, - ) - return self._stubs["update_document"] - - @property - def delete_document( - self, - ) -> Callable[[firestore.DeleteDocumentRequest], empty_pb2.Empty]: - r"""Return a callable for the delete document method over gRPC. - - Deletes a document. - - Returns: - Callable[[~.DeleteDocumentRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "delete_document" not in self._stubs: - self._stubs["delete_document"] = self._logged_channel.unary_unary( - "/google.firestore.v1.Firestore/DeleteDocument", - request_serializer=firestore.DeleteDocumentRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs["delete_document"] - - @property - def batch_get_documents( - self, - ) -> Callable[ - [firestore.BatchGetDocumentsRequest], firestore.BatchGetDocumentsResponse - ]: - r"""Return a callable for the batch get documents method over gRPC. - - Gets multiple documents. - - Documents returned by this method are not guaranteed to - be returned in the same order that they were requested. - - Returns: - Callable[[~.BatchGetDocumentsRequest], - ~.BatchGetDocumentsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "batch_get_documents" not in self._stubs: - self._stubs["batch_get_documents"] = self._logged_channel.unary_stream( - "/google.firestore.v1.Firestore/BatchGetDocuments", - request_serializer=firestore.BatchGetDocumentsRequest.serialize, - response_deserializer=firestore.BatchGetDocumentsResponse.deserialize, - ) - return self._stubs["batch_get_documents"] - - @property - def begin_transaction( - self, - ) -> Callable[ - [firestore.BeginTransactionRequest], firestore.BeginTransactionResponse - ]: - r"""Return a callable for the begin transaction method over gRPC. - - Starts a new transaction. - - Returns: - Callable[[~.BeginTransactionRequest], - ~.BeginTransactionResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "begin_transaction" not in self._stubs: - self._stubs["begin_transaction"] = self._logged_channel.unary_unary( - "/google.firestore.v1.Firestore/BeginTransaction", - request_serializer=firestore.BeginTransactionRequest.serialize, - response_deserializer=firestore.BeginTransactionResponse.deserialize, - ) - return self._stubs["begin_transaction"] - - @property - def commit(self) -> Callable[[firestore.CommitRequest], firestore.CommitResponse]: - r"""Return a callable for the commit method over gRPC. - - Commits a transaction, while optionally updating - documents. - - Returns: - Callable[[~.CommitRequest], - ~.CommitResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "commit" not in self._stubs: - self._stubs["commit"] = self._logged_channel.unary_unary( - "/google.firestore.v1.Firestore/Commit", - request_serializer=firestore.CommitRequest.serialize, - response_deserializer=firestore.CommitResponse.deserialize, - ) - return self._stubs["commit"] - - @property - def rollback(self) -> Callable[[firestore.RollbackRequest], empty_pb2.Empty]: - r"""Return a callable for the rollback method over gRPC. - - Rolls back a transaction. - - Returns: - Callable[[~.RollbackRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "rollback" not in self._stubs: - self._stubs["rollback"] = self._logged_channel.unary_unary( - "/google.firestore.v1.Firestore/Rollback", - request_serializer=firestore.RollbackRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs["rollback"] - - @property - def run_query( - self, - ) -> Callable[[firestore.RunQueryRequest], firestore.RunQueryResponse]: - r"""Return a callable for the run query method over gRPC. - - Runs a query. - - Returns: - Callable[[~.RunQueryRequest], - ~.RunQueryResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "run_query" not in self._stubs: - self._stubs["run_query"] = self._logged_channel.unary_stream( - "/google.firestore.v1.Firestore/RunQuery", - request_serializer=firestore.RunQueryRequest.serialize, - response_deserializer=firestore.RunQueryResponse.deserialize, - ) - return self._stubs["run_query"] - - @property - def run_aggregation_query( - self, - ) -> Callable[ - [firestore.RunAggregationQueryRequest], firestore.RunAggregationQueryResponse - ]: - r"""Return a callable for the run aggregation query method over gRPC. - - Runs an aggregation query. - - Rather than producing [Document][google.firestore.v1.Document] - results like - [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery], - this API allows running an aggregation to produce a series of - [AggregationResult][google.firestore.v1.AggregationResult] - server-side. - - High-Level Example: - - :: - - -- Return the number of documents in table given a filter. - SELECT COUNT(*) FROM ( SELECT * FROM k where a = true ); - - Returns: - Callable[[~.RunAggregationQueryRequest], - ~.RunAggregationQueryResponse]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "run_aggregation_query" not in self._stubs: - self._stubs["run_aggregation_query"] = self._logged_channel.unary_stream( - "/google.firestore.v1.Firestore/RunAggregationQuery", - request_serializer=firestore.RunAggregationQueryRequest.serialize, - response_deserializer=firestore.RunAggregationQueryResponse.deserialize, - ) - return self._stubs["run_aggregation_query"] - - @property - def partition_query( - self, - ) -> Callable[[firestore.PartitionQueryRequest], firestore.PartitionQueryResponse]: - r"""Return a callable for the partition query method over gRPC. - - Partitions a query by returning partition cursors - that can be used to run the query in parallel. The - returned partition cursors are split points that can be - used by RunQuery as starting/end points for the query - results. - - Returns: - Callable[[~.PartitionQueryRequest], - ~.PartitionQueryResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "partition_query" not in self._stubs: - self._stubs["partition_query"] = self._logged_channel.unary_unary( - "/google.firestore.v1.Firestore/PartitionQuery", - request_serializer=firestore.PartitionQueryRequest.serialize, - response_deserializer=firestore.PartitionQueryResponse.deserialize, - ) - return self._stubs["partition_query"] - - @property - def write(self) -> Callable[[firestore.WriteRequest], firestore.WriteResponse]: - r"""Return a callable for the write method over gRPC. - - Streams batches of document updates and deletes, in - order. This method is only available via gRPC or - WebChannel (not REST). 
- - Returns: - Callable[[~.WriteRequest], - ~.WriteResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "write" not in self._stubs: - self._stubs["write"] = self._logged_channel.stream_stream( - "/google.firestore.v1.Firestore/Write", - request_serializer=firestore.WriteRequest.serialize, - response_deserializer=firestore.WriteResponse.deserialize, - ) - return self._stubs["write"] - - @property - def listen(self) -> Callable[[firestore.ListenRequest], firestore.ListenResponse]: - r"""Return a callable for the listen method over gRPC. - - Listens to changes. This method is only available via - gRPC or WebChannel (not REST). - - Returns: - Callable[[~.ListenRequest], - ~.ListenResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "listen" not in self._stubs: - self._stubs["listen"] = self._logged_channel.stream_stream( - "/google.firestore.v1.Firestore/Listen", - request_serializer=firestore.ListenRequest.serialize, - response_deserializer=firestore.ListenResponse.deserialize, - ) - return self._stubs["listen"] - - @property - def list_collection_ids( - self, - ) -> Callable[ - [firestore.ListCollectionIdsRequest], firestore.ListCollectionIdsResponse - ]: - r"""Return a callable for the list collection ids method over gRPC. - - Lists all the collection IDs underneath a document. - - Returns: - Callable[[~.ListCollectionIdsRequest], - ~.ListCollectionIdsResponse]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_collection_ids" not in self._stubs: - self._stubs["list_collection_ids"] = self._logged_channel.unary_unary( - "/google.firestore.v1.Firestore/ListCollectionIds", - request_serializer=firestore.ListCollectionIdsRequest.serialize, - response_deserializer=firestore.ListCollectionIdsResponse.deserialize, - ) - return self._stubs["list_collection_ids"] - - @property - def batch_write( - self, - ) -> Callable[[firestore.BatchWriteRequest], firestore.BatchWriteResponse]: - r"""Return a callable for the batch write method over gRPC. - - Applies a batch of write operations. - - The BatchWrite method does not apply the write operations - atomically and can apply them out of order. Method does not - allow more than one write per document. Each write succeeds or - fails independently. See the - [BatchWriteResponse][google.firestore.v1.BatchWriteResponse] for - the success status of each write. - - If you require an atomically applied set of writes, use - [Commit][google.firestore.v1.Firestore.Commit] instead. - - Returns: - Callable[[~.BatchWriteRequest], - ~.BatchWriteResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "batch_write" not in self._stubs: - self._stubs["batch_write"] = self._logged_channel.unary_unary( - "/google.firestore.v1.Firestore/BatchWrite", - request_serializer=firestore.BatchWriteRequest.serialize, - response_deserializer=firestore.BatchWriteResponse.deserialize, - ) - return self._stubs["batch_write"] - - @property - def create_document( - self, - ) -> Callable[[firestore.CreateDocumentRequest], document.Document]: - r"""Return a callable for the create document method over gRPC. - - Creates a new document. - - Returns: - Callable[[~.CreateDocumentRequest], - ~.Document]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "create_document" not in self._stubs: - self._stubs["create_document"] = self._logged_channel.unary_unary( - "/google.firestore.v1.Firestore/CreateDocument", - request_serializer=firestore.CreateDocumentRequest.serialize, - response_deserializer=document.Document.deserialize, - ) - return self._stubs["create_document"] - - def close(self): - self._logged_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse - ]: - r"""Return a callable for the list_operations method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ("FirestoreGrpcTransport",) diff --git a/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py b/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py deleted file mode 100644 index 6cc93e21a5..0000000000 --- a/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py +++ /dev/null @@ -1,1176 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.firestore_v1.types import document -from google.cloud.firestore_v1.types import document as gf_document -from google.cloud.firestore_v1.types import firestore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import FirestoreTransport, DEFAULT_CLIENT_INFO -from .grpc import FirestoreGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor( - grpc.aio.UnaryUnaryClientInterceptor -): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, 
client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - std_logging.DEBUG - ) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert gRPC metadata `` to list of tuples - metadata = ( - dict([(k, str(v)) for k, v in response_metadata]) - if response_metadata - else None - ) - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response to rpc {client_call_details.method}.", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - 
-class FirestoreGrpcAsyncIOTransport(FirestoreTransport): - """gRPC AsyncIO backend transport for Firestore. - - The Cloud Firestore service. - - Cloud Firestore is a fast, fully managed, serverless, - cloud-native NoSQL document database that simplifies storing, - syncing, and querying data for your mobile, web, and IoT apps at - global scale. Its client libraries provide live synchronization - and offline support, while its security features and - integrations with Firebase and Google Cloud Platform accelerate - building truly serverless apps. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel( - cls, - host: str = "firestore.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs, - ) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be - removed in the next major version of this library. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. 
These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs, - ) - - def __init__( - self, - *, - host: str = "firestore.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'firestore.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): Deprecated. 
A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - This argument will be removed in the next major version of this library. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = ( - "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - ) - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. 
- - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def get_document( - self, - ) -> Callable[[firestore.GetDocumentRequest], Awaitable[document.Document]]: - r"""Return a callable for the get document method over gRPC. - - Gets a single document. - - Returns: - Callable[[~.GetDocumentRequest], - Awaitable[~.Document]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_document" not in self._stubs: - self._stubs["get_document"] = self._logged_channel.unary_unary( - "/google.firestore.v1.Firestore/GetDocument", - request_serializer=firestore.GetDocumentRequest.serialize, - response_deserializer=document.Document.deserialize, - ) - return self._stubs["get_document"] - - @property - def list_documents( - self, - ) -> Callable[ - [firestore.ListDocumentsRequest], Awaitable[firestore.ListDocumentsResponse] - ]: - r"""Return a callable for the list documents method over gRPC. - - Lists documents. - - Returns: - Callable[[~.ListDocumentsRequest], - Awaitable[~.ListDocumentsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "list_documents" not in self._stubs: - self._stubs["list_documents"] = self._logged_channel.unary_unary( - "/google.firestore.v1.Firestore/ListDocuments", - request_serializer=firestore.ListDocumentsRequest.serialize, - response_deserializer=firestore.ListDocumentsResponse.deserialize, - ) - return self._stubs["list_documents"] - - @property - def update_document( - self, - ) -> Callable[[firestore.UpdateDocumentRequest], Awaitable[gf_document.Document]]: - r"""Return a callable for the update document method over gRPC. - - Updates or inserts a document. - - Returns: - Callable[[~.UpdateDocumentRequest], - Awaitable[~.Document]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "update_document" not in self._stubs: - self._stubs["update_document"] = self._logged_channel.unary_unary( - "/google.firestore.v1.Firestore/UpdateDocument", - request_serializer=firestore.UpdateDocumentRequest.serialize, - response_deserializer=gf_document.Document.deserialize, - ) - return self._stubs["update_document"] - - @property - def delete_document( - self, - ) -> Callable[[firestore.DeleteDocumentRequest], Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete document method over gRPC. - - Deletes a document. - - Returns: - Callable[[~.DeleteDocumentRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "delete_document" not in self._stubs: - self._stubs["delete_document"] = self._logged_channel.unary_unary( - "/google.firestore.v1.Firestore/DeleteDocument", - request_serializer=firestore.DeleteDocumentRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs["delete_document"] - - @property - def batch_get_documents( - self, - ) -> Callable[ - [firestore.BatchGetDocumentsRequest], - Awaitable[firestore.BatchGetDocumentsResponse], - ]: - r"""Return a callable for the batch get documents method over gRPC. - - Gets multiple documents. - - Documents returned by this method are not guaranteed to - be returned in the same order that they were requested. - - Returns: - Callable[[~.BatchGetDocumentsRequest], - Awaitable[~.BatchGetDocumentsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "batch_get_documents" not in self._stubs: - self._stubs["batch_get_documents"] = self._logged_channel.unary_stream( - "/google.firestore.v1.Firestore/BatchGetDocuments", - request_serializer=firestore.BatchGetDocumentsRequest.serialize, - response_deserializer=firestore.BatchGetDocumentsResponse.deserialize, - ) - return self._stubs["batch_get_documents"] - - @property - def begin_transaction( - self, - ) -> Callable[ - [firestore.BeginTransactionRequest], - Awaitable[firestore.BeginTransactionResponse], - ]: - r"""Return a callable for the begin transaction method over gRPC. - - Starts a new transaction. - - Returns: - Callable[[~.BeginTransactionRequest], - Awaitable[~.BeginTransactionResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "begin_transaction" not in self._stubs: - self._stubs["begin_transaction"] = self._logged_channel.unary_unary( - "/google.firestore.v1.Firestore/BeginTransaction", - request_serializer=firestore.BeginTransactionRequest.serialize, - response_deserializer=firestore.BeginTransactionResponse.deserialize, - ) - return self._stubs["begin_transaction"] - - @property - def commit( - self, - ) -> Callable[[firestore.CommitRequest], Awaitable[firestore.CommitResponse]]: - r"""Return a callable for the commit method over gRPC. - - Commits a transaction, while optionally updating - documents. - - Returns: - Callable[[~.CommitRequest], - Awaitable[~.CommitResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "commit" not in self._stubs: - self._stubs["commit"] = self._logged_channel.unary_unary( - "/google.firestore.v1.Firestore/Commit", - request_serializer=firestore.CommitRequest.serialize, - response_deserializer=firestore.CommitResponse.deserialize, - ) - return self._stubs["commit"] - - @property - def rollback( - self, - ) -> Callable[[firestore.RollbackRequest], Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the rollback method over gRPC. - - Rolls back a transaction. - - Returns: - Callable[[~.RollbackRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "rollback" not in self._stubs: - self._stubs["rollback"] = self._logged_channel.unary_unary( - "/google.firestore.v1.Firestore/Rollback", - request_serializer=firestore.RollbackRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs["rollback"] - - @property - def run_query( - self, - ) -> Callable[[firestore.RunQueryRequest], Awaitable[firestore.RunQueryResponse]]: - r"""Return a callable for the run query method over gRPC. - - Runs a query. - - Returns: - Callable[[~.RunQueryRequest], - Awaitable[~.RunQueryResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "run_query" not in self._stubs: - self._stubs["run_query"] = self._logged_channel.unary_stream( - "/google.firestore.v1.Firestore/RunQuery", - request_serializer=firestore.RunQueryRequest.serialize, - response_deserializer=firestore.RunQueryResponse.deserialize, - ) - return self._stubs["run_query"] - - @property - def run_aggregation_query( - self, - ) -> Callable[ - [firestore.RunAggregationQueryRequest], - Awaitable[firestore.RunAggregationQueryResponse], - ]: - r"""Return a callable for the run aggregation query method over gRPC. - - Runs an aggregation query. - - Rather than producing [Document][google.firestore.v1.Document] - results like - [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery], - this API allows running an aggregation to produce a series of - [AggregationResult][google.firestore.v1.AggregationResult] - server-side. - - High-Level Example: - - :: - - -- Return the number of documents in table given a filter. 
- SELECT COUNT(*) FROM ( SELECT * FROM k where a = true ); - - Returns: - Callable[[~.RunAggregationQueryRequest], - Awaitable[~.RunAggregationQueryResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "run_aggregation_query" not in self._stubs: - self._stubs["run_aggregation_query"] = self._logged_channel.unary_stream( - "/google.firestore.v1.Firestore/RunAggregationQuery", - request_serializer=firestore.RunAggregationQueryRequest.serialize, - response_deserializer=firestore.RunAggregationQueryResponse.deserialize, - ) - return self._stubs["run_aggregation_query"] - - @property - def partition_query( - self, - ) -> Callable[ - [firestore.PartitionQueryRequest], Awaitable[firestore.PartitionQueryResponse] - ]: - r"""Return a callable for the partition query method over gRPC. - - Partitions a query by returning partition cursors - that can be used to run the query in parallel. The - returned partition cursors are split points that can be - used by RunQuery as starting/end points for the query - results. - - Returns: - Callable[[~.PartitionQueryRequest], - Awaitable[~.PartitionQueryResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "partition_query" not in self._stubs: - self._stubs["partition_query"] = self._logged_channel.unary_unary( - "/google.firestore.v1.Firestore/PartitionQuery", - request_serializer=firestore.PartitionQueryRequest.serialize, - response_deserializer=firestore.PartitionQueryResponse.deserialize, - ) - return self._stubs["partition_query"] - - @property - def write( - self, - ) -> Callable[[firestore.WriteRequest], Awaitable[firestore.WriteResponse]]: - r"""Return a callable for the write method over gRPC. - - Streams batches of document updates and deletes, in - order. This method is only available via gRPC or - WebChannel (not REST). - - Returns: - Callable[[~.WriteRequest], - Awaitable[~.WriteResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "write" not in self._stubs: - self._stubs["write"] = self._logged_channel.stream_stream( - "/google.firestore.v1.Firestore/Write", - request_serializer=firestore.WriteRequest.serialize, - response_deserializer=firestore.WriteResponse.deserialize, - ) - return self._stubs["write"] - - @property - def listen( - self, - ) -> Callable[[firestore.ListenRequest], Awaitable[firestore.ListenResponse]]: - r"""Return a callable for the listen method over gRPC. - - Listens to changes. This method is only available via - gRPC or WebChannel (not REST). - - Returns: - Callable[[~.ListenRequest], - Awaitable[~.ListenResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "listen" not in self._stubs: - self._stubs["listen"] = self._logged_channel.stream_stream( - "/google.firestore.v1.Firestore/Listen", - request_serializer=firestore.ListenRequest.serialize, - response_deserializer=firestore.ListenResponse.deserialize, - ) - return self._stubs["listen"] - - @property - def list_collection_ids( - self, - ) -> Callable[ - [firestore.ListCollectionIdsRequest], - Awaitable[firestore.ListCollectionIdsResponse], - ]: - r"""Return a callable for the list collection ids method over gRPC. - - Lists all the collection IDs underneath a document. - - Returns: - Callable[[~.ListCollectionIdsRequest], - Awaitable[~.ListCollectionIdsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_collection_ids" not in self._stubs: - self._stubs["list_collection_ids"] = self._logged_channel.unary_unary( - "/google.firestore.v1.Firestore/ListCollectionIds", - request_serializer=firestore.ListCollectionIdsRequest.serialize, - response_deserializer=firestore.ListCollectionIdsResponse.deserialize, - ) - return self._stubs["list_collection_ids"] - - @property - def batch_write( - self, - ) -> Callable[ - [firestore.BatchWriteRequest], Awaitable[firestore.BatchWriteResponse] - ]: - r"""Return a callable for the batch write method over gRPC. - - Applies a batch of write operations. - - The BatchWrite method does not apply the write operations - atomically and can apply them out of order. Method does not - allow more than one write per document. Each write succeeds or - fails independently. See the - [BatchWriteResponse][google.firestore.v1.BatchWriteResponse] for - the success status of each write. - - If you require an atomically applied set of writes, use - [Commit][google.firestore.v1.Firestore.Commit] instead. 
- - Returns: - Callable[[~.BatchWriteRequest], - Awaitable[~.BatchWriteResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "batch_write" not in self._stubs: - self._stubs["batch_write"] = self._logged_channel.unary_unary( - "/google.firestore.v1.Firestore/BatchWrite", - request_serializer=firestore.BatchWriteRequest.serialize, - response_deserializer=firestore.BatchWriteResponse.deserialize, - ) - return self._stubs["batch_write"] - - @property - def create_document( - self, - ) -> Callable[[firestore.CreateDocumentRequest], Awaitable[document.Document]]: - r"""Return a callable for the create document method over gRPC. - - Creates a new document. - - Returns: - Callable[[~.CreateDocumentRequest], - Awaitable[~.Document]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "create_document" not in self._stubs: - self._stubs["create_document"] = self._logged_channel.unary_unary( - "/google.firestore.v1.Firestore/CreateDocument", - request_serializer=firestore.CreateDocumentRequest.serialize, - response_deserializer=document.Document.deserialize, - ) - return self._stubs["create_document"] - - def _prep_wrapped_messages(self, client_info): - """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.get_document: self._wrap_method( - self.get_document, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_documents: self._wrap_method( - self.list_documents, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.update_document: self._wrap_method( - self.update_document, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.delete_document: self._wrap_method( - self.delete_document, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - 
core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.batch_get_documents: self._wrap_method( - self.batch_get_documents, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.begin_transaction: self._wrap_method( - self.begin_transaction, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.commit: self._wrap_method( - self.commit, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.rollback: self._wrap_method( - self.rollback, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.run_query: self._wrap_method( - self.run_query, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - 
core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.run_aggregation_query: self._wrap_method( - self.run_aggregation_query, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.partition_query: self._wrap_method( - self.partition_query, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.write: self._wrap_method( - self.write, - default_timeout=86400.0, - client_info=client_info, - ), - self.listen: self._wrap_method( - self.listen, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=86400.0, - ), - default_timeout=86400.0, - client_info=client_info, - ), - self.list_collection_ids: self._wrap_method( - self.list_collection_ids, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.batch_write: 
self._wrap_method( - self.batch_write, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_document: self._wrap_method( - self.create_document, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.cancel_operation: self._wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: self._wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: self._wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: self._wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse - ]: - r"""Return a callable for the list_operations method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - -__all__ = ("FirestoreGrpcAsyncIOTransport",) diff --git a/google/cloud/firestore_v1/services/firestore/transports/rest.py b/google/cloud/firestore_v1/services/firestore/transports/rest.py deleted file mode 100644 index a32a7e84ea..0000000000 --- a/google/cloud/firestore_v1/services/firestore/transports/rest.py +++ /dev/null @@ -1,3760 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 -import google.protobuf - -from google.protobuf import json_format -from google.cloud.location import locations_pb2 # type: ignore - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.firestore_v1.types import document -from google.cloud.firestore_v1.types import document as gf_document -from google.cloud.firestore_v1.types import firestore -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - - -from .rest_base import _BaseFirestoreRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - 
DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class FirestoreRestInterceptor: - """Interceptor for Firestore. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the FirestoreRestTransport. - - .. code-block:: python - class MyCustomFirestoreInterceptor(FirestoreRestInterceptor): - def pre_batch_get_documents(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_batch_get_documents(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_batch_write(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_batch_write(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_begin_transaction(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_begin_transaction(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_commit(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_commit(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_document(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_document(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_document(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_get_document(self, request, metadata): - 
logging.log(f"Received request: {request}") - return request, metadata - - def post_get_document(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_collection_ids(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_collection_ids(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_documents(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_documents(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_partition_query(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_partition_query(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_rollback(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_run_aggregation_query(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_run_aggregation_query(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_run_query(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_run_query(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_document(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_document(self, response): - logging.log(f"Received response: {response}") - return response - - transport = FirestoreRestTransport(interceptor=MyCustomFirestoreInterceptor()) - client = FirestoreClient(transport=transport) - - - """ - - def pre_batch_get_documents( - self, - request: firestore.BatchGetDocumentsRequest, - metadata: 
Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore.BatchGetDocumentsRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for batch_get_documents - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. - """ - return request, metadata - - def post_batch_get_documents( - self, response: rest_streaming.ResponseIterator - ) -> rest_streaming.ResponseIterator: - """Post-rpc interceptor for batch_get_documents - - DEPRECATED. Please use the `post_batch_get_documents_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. This `post_batch_get_documents` interceptor runs - before the `post_batch_get_documents_with_metadata` interceptor. - """ - return response - - def post_batch_get_documents_with_metadata( - self, - response: rest_streaming.ResponseIterator, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - rest_streaming.ResponseIterator, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Post-rpc interceptor for batch_get_documents - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Firestore server but before it is returned to user code. - - We recommend only using this `post_batch_get_documents_with_metadata` - interceptor in new development instead of the `post_batch_get_documents` interceptor. - When both interceptors are used, this `post_batch_get_documents_with_metadata` interceptor runs after the - `post_batch_get_documents` interceptor. The (possibly modified) response returned by - `post_batch_get_documents` will be passed to - `post_batch_get_documents_with_metadata`. 
- """ - return response, metadata - - def pre_batch_write( - self, - request: firestore.BatchWriteRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[firestore.BatchWriteRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for batch_write - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. - """ - return request, metadata - - def post_batch_write( - self, response: firestore.BatchWriteResponse - ) -> firestore.BatchWriteResponse: - """Post-rpc interceptor for batch_write - - DEPRECATED. Please use the `post_batch_write_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. This `post_batch_write` interceptor runs - before the `post_batch_write_with_metadata` interceptor. - """ - return response - - def post_batch_write_with_metadata( - self, - response: firestore.BatchWriteResponse, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[firestore.BatchWriteResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for batch_write - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Firestore server but before it is returned to user code. - - We recommend only using this `post_batch_write_with_metadata` - interceptor in new development instead of the `post_batch_write` interceptor. - When both interceptors are used, this `post_batch_write_with_metadata` interceptor runs after the - `post_batch_write` interceptor. The (possibly modified) response returned by - `post_batch_write` will be passed to - `post_batch_write_with_metadata`. 
- """ - return response, metadata - - def pre_begin_transaction( - self, - request: firestore.BeginTransactionRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore.BeginTransactionRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for begin_transaction - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. - """ - return request, metadata - - def post_begin_transaction( - self, response: firestore.BeginTransactionResponse - ) -> firestore.BeginTransactionResponse: - """Post-rpc interceptor for begin_transaction - - DEPRECATED. Please use the `post_begin_transaction_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. This `post_begin_transaction` interceptor runs - before the `post_begin_transaction_with_metadata` interceptor. - """ - return response - - def post_begin_transaction_with_metadata( - self, - response: firestore.BeginTransactionResponse, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore.BeginTransactionResponse, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Post-rpc interceptor for begin_transaction - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Firestore server but before it is returned to user code. - - We recommend only using this `post_begin_transaction_with_metadata` - interceptor in new development instead of the `post_begin_transaction` interceptor. - When both interceptors are used, this `post_begin_transaction_with_metadata` interceptor runs after the - `post_begin_transaction` interceptor. The (possibly modified) response returned by - `post_begin_transaction` will be passed to - `post_begin_transaction_with_metadata`. 
- """ - return response, metadata - - def pre_commit( - self, - request: firestore.CommitRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[firestore.CommitRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for commit - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. - """ - return request, metadata - - def post_commit( - self, response: firestore.CommitResponse - ) -> firestore.CommitResponse: - """Post-rpc interceptor for commit - - DEPRECATED. Please use the `post_commit_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. This `post_commit` interceptor runs - before the `post_commit_with_metadata` interceptor. - """ - return response - - def post_commit_with_metadata( - self, - response: firestore.CommitResponse, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[firestore.CommitResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for commit - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Firestore server but before it is returned to user code. - - We recommend only using this `post_commit_with_metadata` - interceptor in new development instead of the `post_commit` interceptor. - When both interceptors are used, this `post_commit_with_metadata` interceptor runs after the - `post_commit` interceptor. The (possibly modified) response returned by - `post_commit` will be passed to - `post_commit_with_metadata`. 
- """ - return response, metadata - - def pre_create_document( - self, - request: firestore.CreateDocumentRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore.CreateDocumentRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for create_document - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. - """ - return request, metadata - - def post_create_document(self, response: document.Document) -> document.Document: - """Post-rpc interceptor for create_document - - DEPRECATED. Please use the `post_create_document_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. This `post_create_document` interceptor runs - before the `post_create_document_with_metadata` interceptor. - """ - return response - - def post_create_document_with_metadata( - self, - response: document.Document, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[document.Document, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_document - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Firestore server but before it is returned to user code. - - We recommend only using this `post_create_document_with_metadata` - interceptor in new development instead of the `post_create_document` interceptor. - When both interceptors are used, this `post_create_document_with_metadata` interceptor runs after the - `post_create_document` interceptor. The (possibly modified) response returned by - `post_create_document` will be passed to - `post_create_document_with_metadata`. 
- """ - return response, metadata - - def pre_delete_document( - self, - request: firestore.DeleteDocumentRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore.DeleteDocumentRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for delete_document - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. - """ - return request, metadata - - def pre_get_document( - self, - request: firestore.GetDocumentRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[firestore.GetDocumentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_document - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. - """ - return request, metadata - - def post_get_document(self, response: document.Document) -> document.Document: - """Post-rpc interceptor for get_document - - DEPRECATED. Please use the `post_get_document_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. This `post_get_document` interceptor runs - before the `post_get_document_with_metadata` interceptor. - """ - return response - - def post_get_document_with_metadata( - self, - response: document.Document, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[document.Document, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_document - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Firestore server but before it is returned to user code. - - We recommend only using this `post_get_document_with_metadata` - interceptor in new development instead of the `post_get_document` interceptor. 
- When both interceptors are used, this `post_get_document_with_metadata` interceptor runs after the - `post_get_document` interceptor. The (possibly modified) response returned by - `post_get_document` will be passed to - `post_get_document_with_metadata`. - """ - return response, metadata - - def pre_list_collection_ids( - self, - request: firestore.ListCollectionIdsRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore.ListCollectionIdsRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for list_collection_ids - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. - """ - return request, metadata - - def post_list_collection_ids( - self, response: firestore.ListCollectionIdsResponse - ) -> firestore.ListCollectionIdsResponse: - """Post-rpc interceptor for list_collection_ids - - DEPRECATED. Please use the `post_list_collection_ids_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. This `post_list_collection_ids` interceptor runs - before the `post_list_collection_ids_with_metadata` interceptor. - """ - return response - - def post_list_collection_ids_with_metadata( - self, - response: firestore.ListCollectionIdsResponse, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore.ListCollectionIdsResponse, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Post-rpc interceptor for list_collection_ids - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Firestore server but before it is returned to user code. - - We recommend only using this `post_list_collection_ids_with_metadata` - interceptor in new development instead of the `post_list_collection_ids` interceptor. 
- When both interceptors are used, this `post_list_collection_ids_with_metadata` interceptor runs after the - `post_list_collection_ids` interceptor. The (possibly modified) response returned by - `post_list_collection_ids` will be passed to - `post_list_collection_ids_with_metadata`. - """ - return response, metadata - - def pre_list_documents( - self, - request: firestore.ListDocumentsRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[firestore.ListDocumentsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_documents - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. - """ - return request, metadata - - def post_list_documents( - self, response: firestore.ListDocumentsResponse - ) -> firestore.ListDocumentsResponse: - """Post-rpc interceptor for list_documents - - DEPRECATED. Please use the `post_list_documents_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. This `post_list_documents` interceptor runs - before the `post_list_documents_with_metadata` interceptor. - """ - return response - - def post_list_documents_with_metadata( - self, - response: firestore.ListDocumentsResponse, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore.ListDocumentsResponse, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Post-rpc interceptor for list_documents - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Firestore server but before it is returned to user code. - - We recommend only using this `post_list_documents_with_metadata` - interceptor in new development instead of the `post_list_documents` interceptor. 
- When both interceptors are used, this `post_list_documents_with_metadata` interceptor runs after the - `post_list_documents` interceptor. The (possibly modified) response returned by - `post_list_documents` will be passed to - `post_list_documents_with_metadata`. - """ - return response, metadata - - def pre_partition_query( - self, - request: firestore.PartitionQueryRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore.PartitionQueryRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for partition_query - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. - """ - return request, metadata - - def post_partition_query( - self, response: firestore.PartitionQueryResponse - ) -> firestore.PartitionQueryResponse: - """Post-rpc interceptor for partition_query - - DEPRECATED. Please use the `post_partition_query_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. This `post_partition_query` interceptor runs - before the `post_partition_query_with_metadata` interceptor. - """ - return response - - def post_partition_query_with_metadata( - self, - response: firestore.PartitionQueryResponse, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore.PartitionQueryResponse, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Post-rpc interceptor for partition_query - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Firestore server but before it is returned to user code. - - We recommend only using this `post_partition_query_with_metadata` - interceptor in new development instead of the `post_partition_query` interceptor. 
- When both interceptors are used, this `post_partition_query_with_metadata` interceptor runs after the - `post_partition_query` interceptor. The (possibly modified) response returned by - `post_partition_query` will be passed to - `post_partition_query_with_metadata`. - """ - return response, metadata - - def pre_rollback( - self, - request: firestore.RollbackRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[firestore.RollbackRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for rollback - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. - """ - return request, metadata - - def pre_run_aggregation_query( - self, - request: firestore.RunAggregationQueryRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore.RunAggregationQueryRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for run_aggregation_query - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. - """ - return request, metadata - - def post_run_aggregation_query( - self, response: rest_streaming.ResponseIterator - ) -> rest_streaming.ResponseIterator: - """Post-rpc interceptor for run_aggregation_query - - DEPRECATED. Please use the `post_run_aggregation_query_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. This `post_run_aggregation_query` interceptor runs - before the `post_run_aggregation_query_with_metadata` interceptor. 
- """ - return response - - def post_run_aggregation_query_with_metadata( - self, - response: rest_streaming.ResponseIterator, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - rest_streaming.ResponseIterator, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Post-rpc interceptor for run_aggregation_query - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Firestore server but before it is returned to user code. - - We recommend only using this `post_run_aggregation_query_with_metadata` - interceptor in new development instead of the `post_run_aggregation_query` interceptor. - When both interceptors are used, this `post_run_aggregation_query_with_metadata` interceptor runs after the - `post_run_aggregation_query` interceptor. The (possibly modified) response returned by - `post_run_aggregation_query` will be passed to - `post_run_aggregation_query_with_metadata`. - """ - return response, metadata - - def pre_run_query( - self, - request: firestore.RunQueryRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[firestore.RunQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for run_query - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. - """ - return request, metadata - - def post_run_query( - self, response: rest_streaming.ResponseIterator - ) -> rest_streaming.ResponseIterator: - """Post-rpc interceptor for run_query - - DEPRECATED. Please use the `post_run_query_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. This `post_run_query` interceptor runs - before the `post_run_query_with_metadata` interceptor. 
- """ - return response - - def post_run_query_with_metadata( - self, - response: rest_streaming.ResponseIterator, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - rest_streaming.ResponseIterator, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Post-rpc interceptor for run_query - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Firestore server but before it is returned to user code. - - We recommend only using this `post_run_query_with_metadata` - interceptor in new development instead of the `post_run_query` interceptor. - When both interceptors are used, this `post_run_query_with_metadata` interceptor runs after the - `post_run_query` interceptor. The (possibly modified) response returned by - `post_run_query` will be passed to - `post_run_query_with_metadata`. - """ - return response, metadata - - def pre_update_document( - self, - request: firestore.UpdateDocumentRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore.UpdateDocumentRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for update_document - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. - """ - return request, metadata - - def post_update_document( - self, response: gf_document.Document - ) -> gf_document.Document: - """Post-rpc interceptor for update_document - - DEPRECATED. Please use the `post_update_document_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. This `post_update_document` interceptor runs - before the `post_update_document_with_metadata` interceptor. 
- """ - return response - - def post_update_document_with_metadata( - self, - response: gf_document.Document, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[gf_document.Document, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_document - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Firestore server but before it is returned to user code. - - We recommend only using this `post_update_document_with_metadata` - interceptor in new development instead of the `post_update_document` interceptor. - When both interceptors are used, this `post_update_document_with_metadata` interceptor runs after the - `post_update_document` interceptor. The (possibly modified) response returned by - `post_update_document` will be passed to - `post_update_document_with_metadata`. - """ - return response, metadata - - def pre_cancel_operation( - self, - request: operations_pb2.CancelOperationRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. - """ - return request, metadata - - def post_cancel_operation(self, response: None) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. 
- """ - return response - - def pre_delete_operation( - self, - request: operations_pb2.DeleteOperationRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. - """ - return request, metadata - - def post_delete_operation(self, response: None) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, - request: operations_pb2.GetOperationRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. - """ - return response - - def pre_list_operations( - self, - request: operations_pb2.ListOperationsRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. 
- """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class FirestoreRestStub: - _session: AuthorizedSession - _host: str - _interceptor: FirestoreRestInterceptor - - -class FirestoreRestTransport(_BaseFirestoreRestTransport): - """REST backend synchronous transport for Firestore. - - The Cloud Firestore service. - - Cloud Firestore is a fast, fully managed, serverless, - cloud-native NoSQL document database that simplifies storing, - syncing, and querying data for your mobile, web, and IoT apps at - global scale. Its client libraries provide live synchronization - and offline support, while its security features and - integrations with Firebase and Google Cloud Platform accelerate - building truly serverless apps. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__( - self, - *, - host: str = "firestore.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = "https", - interceptor: Optional[FirestoreRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. 
- - Args: - host (Optional[str]): - The hostname to connect to (default: 'firestore.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. This argument will be - removed in the next major version of this library. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience, - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST - ) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or FirestoreRestInterceptor() - self._prep_wrapped_messages(client_info) - - class _BatchGetDocuments( - _BaseFirestoreRestTransport._BaseBatchGetDocuments, FirestoreRestStub - ): - def __hash__(self): - return hash("FirestoreRestTransport.BatchGetDocuments") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - stream=True, - ) - return response - - def __call__( - self, - request: firestore.BatchGetDocumentsRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> rest_streaming.ResponseIterator: - r"""Call the batch get documents method over HTTP. - - Args: - request (~.firestore.BatchGetDocumentsRequest): - The request object. The request for - [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.firestore.BatchGetDocumentsResponse: - The streamed response for - [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments]. - - """ - - http_options = ( - _BaseFirestoreRestTransport._BaseBatchGetDocuments._get_http_options() - ) - - request, metadata = self._interceptor.pre_batch_get_documents( - request, metadata - ) - transcoded_request = _BaseFirestoreRestTransport._BaseBatchGetDocuments._get_transcoded_request( - http_options, request - ) - - body = _BaseFirestoreRestTransport._BaseBatchGetDocuments._get_request_body_json( - transcoded_request - ) - - # Jsonify the query params - query_params = _BaseFirestoreRestTransport._BaseBatchGetDocuments._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore_v1.FirestoreClient.BatchGetDocuments", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "BatchGetDocuments", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreRestTransport._BatchGetDocuments._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - body, - ) - - # In case of error, raise the appropriate 
core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = rest_streaming.ResponseIterator( - response, firestore.BatchGetDocumentsResponse - ) - - resp = self._interceptor.post_batch_get_documents(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_batch_get_documents_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - http_response = { - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore_v1.FirestoreClient.batch_get_documents", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "BatchGetDocuments", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _BatchWrite(_BaseFirestoreRestTransport._BaseBatchWrite, FirestoreRestStub): - def __hash__(self): - return hash("FirestoreRestTransport.BatchWrite") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__( - self, - request: firestore.BatchWriteRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> firestore.BatchWriteResponse: - r"""Call the batch write method over HTTP. 
- - Args: - request (~.firestore.BatchWriteRequest): - The request object. The request for - [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.firestore.BatchWriteResponse: - The response from - [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite]. - - """ - - http_options = ( - _BaseFirestoreRestTransport._BaseBatchWrite._get_http_options() - ) - - request, metadata = self._interceptor.pre_batch_write(request, metadata) - transcoded_request = ( - _BaseFirestoreRestTransport._BaseBatchWrite._get_transcoded_request( - http_options, request - ) - ) - - body = _BaseFirestoreRestTransport._BaseBatchWrite._get_request_body_json( - transcoded_request - ) - - # Jsonify the query params - query_params = ( - _BaseFirestoreRestTransport._BaseBatchWrite._get_query_params_json( - transcoded_request - ) - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore_v1.FirestoreClient.BatchWrite", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "BatchWrite", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # 
Send the request - response = FirestoreRestTransport._BatchWrite._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = firestore.BatchWriteResponse() - pb_resp = firestore.BatchWriteResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_batch_write(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_batch_write_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = firestore.BatchWriteResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore_v1.FirestoreClient.batch_write", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "BatchWrite", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _BeginTransaction( - _BaseFirestoreRestTransport._BaseBeginTransaction, FirestoreRestStub - ): - def __hash__(self): - return hash("FirestoreRestTransport.BeginTransaction") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - 
params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__( - self, - request: firestore.BeginTransactionRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> firestore.BeginTransactionResponse: - r"""Call the begin transaction method over HTTP. - - Args: - request (~.firestore.BeginTransactionRequest): - The request object. The request for - [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.firestore.BeginTransactionResponse: - The response for - [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction]. 
- - """ - - http_options = ( - _BaseFirestoreRestTransport._BaseBeginTransaction._get_http_options() - ) - - request, metadata = self._interceptor.pre_begin_transaction( - request, metadata - ) - transcoded_request = _BaseFirestoreRestTransport._BaseBeginTransaction._get_transcoded_request( - http_options, request - ) - - body = _BaseFirestoreRestTransport._BaseBeginTransaction._get_request_body_json( - transcoded_request - ) - - # Jsonify the query params - query_params = _BaseFirestoreRestTransport._BaseBeginTransaction._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore_v1.FirestoreClient.BeginTransaction", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "BeginTransaction", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreRestTransport._BeginTransaction._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = firestore.BeginTransactionResponse() - pb_resp = firestore.BeginTransactionResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_begin_transaction(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_begin_transaction_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = firestore.BeginTransactionResponse.to_json( - response - ) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore_v1.FirestoreClient.begin_transaction", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "BeginTransaction", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _Commit(_BaseFirestoreRestTransport._BaseCommit, FirestoreRestStub): - def __hash__(self): - return hash("FirestoreRestTransport.Commit") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__( - self, - request: firestore.CommitRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, 
Union[str, bytes]]] = (), - ) -> firestore.CommitResponse: - r"""Call the commit method over HTTP. - - Args: - request (~.firestore.CommitRequest): - The request object. The request for - [Firestore.Commit][google.firestore.v1.Firestore.Commit]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.firestore.CommitResponse: - The response for - [Firestore.Commit][google.firestore.v1.Firestore.Commit]. - - """ - - http_options = _BaseFirestoreRestTransport._BaseCommit._get_http_options() - - request, metadata = self._interceptor.pre_commit(request, metadata) - transcoded_request = ( - _BaseFirestoreRestTransport._BaseCommit._get_transcoded_request( - http_options, request - ) - ) - - body = _BaseFirestoreRestTransport._BaseCommit._get_request_body_json( - transcoded_request - ) - - # Jsonify the query params - query_params = ( - _BaseFirestoreRestTransport._BaseCommit._get_query_params_json( - transcoded_request - ) - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore_v1.FirestoreClient.Commit", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "Commit", - "httpRequest": http_request, - "metadata": 
http_request["headers"], - }, - ) - - # Send the request - response = FirestoreRestTransport._Commit._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = firestore.CommitResponse() - pb_resp = firestore.CommitResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_commit(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_commit_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = firestore.CommitResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore_v1.FirestoreClient.commit", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "Commit", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateDocument( - _BaseFirestoreRestTransport._BaseCreateDocument, FirestoreRestStub - ): - def __hash__(self): - return hash("FirestoreRestTransport.CreateDocument") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - 
params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__( - self, - request: firestore.CreateDocumentRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> document.Document: - r"""Call the create document method over HTTP. - - Args: - request (~.firestore.CreateDocumentRequest): - The request object. The request for - [Firestore.CreateDocument][google.firestore.v1.Firestore.CreateDocument]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.document.Document: - A Firestore document. - - Must not exceed 1 MiB - 4 bytes. 
- - """ - - http_options = ( - _BaseFirestoreRestTransport._BaseCreateDocument._get_http_options() - ) - - request, metadata = self._interceptor.pre_create_document(request, metadata) - transcoded_request = ( - _BaseFirestoreRestTransport._BaseCreateDocument._get_transcoded_request( - http_options, request - ) - ) - - body = ( - _BaseFirestoreRestTransport._BaseCreateDocument._get_request_body_json( - transcoded_request - ) - ) - - # Jsonify the query params - query_params = ( - _BaseFirestoreRestTransport._BaseCreateDocument._get_query_params_json( - transcoded_request - ) - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore_v1.FirestoreClient.CreateDocument", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "CreateDocument", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreRestTransport._CreateDocument._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = document.Document() - pb_resp = document.Document.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_document(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_document_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = document.Document.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore_v1.FirestoreClient.create_document", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "CreateDocument", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteDocument( - _BaseFirestoreRestTransport._BaseDeleteDocument, FirestoreRestStub - ): - def __hash__(self): - return hash("FirestoreRestTransport.DeleteDocument") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__( - self, - request: firestore.DeleteDocumentRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ): - r"""Call 
the delete document method over HTTP. - - Args: - request (~.firestore.DeleteDocumentRequest): - The request object. The request for - [Firestore.DeleteDocument][google.firestore.v1.Firestore.DeleteDocument]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = ( - _BaseFirestoreRestTransport._BaseDeleteDocument._get_http_options() - ) - - request, metadata = self._interceptor.pre_delete_document(request, metadata) - transcoded_request = ( - _BaseFirestoreRestTransport._BaseDeleteDocument._get_transcoded_request( - http_options, request - ) - ) - - # Jsonify the query params - query_params = ( - _BaseFirestoreRestTransport._BaseDeleteDocument._get_query_params_json( - transcoded_request - ) - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore_v1.FirestoreClient.DeleteDocument", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "DeleteDocument", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreRestTransport._DeleteDocument._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - 
transcoded_request, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _GetDocument(_BaseFirestoreRestTransport._BaseGetDocument, FirestoreRestStub): - def __hash__(self): - return hash("FirestoreRestTransport.GetDocument") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__( - self, - request: firestore.GetDocumentRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> document.Document: - r"""Call the get document method over HTTP. - - Args: - request (~.firestore.GetDocumentRequest): - The request object. The request for - [Firestore.GetDocument][google.firestore.v1.Firestore.GetDocument]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.document.Document: - A Firestore document. - - Must not exceed 1 MiB - 4 bytes. 
- - """ - - http_options = ( - _BaseFirestoreRestTransport._BaseGetDocument._get_http_options() - ) - - request, metadata = self._interceptor.pre_get_document(request, metadata) - transcoded_request = ( - _BaseFirestoreRestTransport._BaseGetDocument._get_transcoded_request( - http_options, request - ) - ) - - # Jsonify the query params - query_params = ( - _BaseFirestoreRestTransport._BaseGetDocument._get_query_params_json( - transcoded_request - ) - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore_v1.FirestoreClient.GetDocument", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "GetDocument", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreRestTransport._GetDocument._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = document.Document() - pb_resp = document.Document.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_document(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_document_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = document.Document.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore_v1.FirestoreClient.get_document", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "GetDocument", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListCollectionIds( - _BaseFirestoreRestTransport._BaseListCollectionIds, FirestoreRestStub - ): - def __hash__(self): - return hash("FirestoreRestTransport.ListCollectionIds") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__( - self, - request: firestore.ListCollectionIdsRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) 
-> firestore.ListCollectionIdsResponse: - r"""Call the list collection ids method over HTTP. - - Args: - request (~.firestore.ListCollectionIdsRequest): - The request object. The request for - [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.firestore.ListCollectionIdsResponse: - The response from - [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds]. - - """ - - http_options = ( - _BaseFirestoreRestTransport._BaseListCollectionIds._get_http_options() - ) - - request, metadata = self._interceptor.pre_list_collection_ids( - request, metadata - ) - transcoded_request = _BaseFirestoreRestTransport._BaseListCollectionIds._get_transcoded_request( - http_options, request - ) - - body = _BaseFirestoreRestTransport._BaseListCollectionIds._get_request_body_json( - transcoded_request - ) - - # Jsonify the query params - query_params = _BaseFirestoreRestTransport._BaseListCollectionIds._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore_v1.FirestoreClient.ListCollectionIds", - 
extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "ListCollectionIds", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreRestTransport._ListCollectionIds._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = firestore.ListCollectionIdsResponse() - pb_resp = firestore.ListCollectionIdsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_collection_ids(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_collection_ids_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = firestore.ListCollectionIdsResponse.to_json( - response - ) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore_v1.FirestoreClient.list_collection_ids", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "ListCollectionIds", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListDocuments( - _BaseFirestoreRestTransport._BaseListDocuments, FirestoreRestStub - ): - def __hash__(self): - return hash("FirestoreRestTransport.ListDocuments") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = 
transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__( - self, - request: firestore.ListDocumentsRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> firestore.ListDocumentsResponse: - r"""Call the list documents method over HTTP. - - Args: - request (~.firestore.ListDocumentsRequest): - The request object. The request for - [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.firestore.ListDocumentsResponse: - The response for - [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments]. 
- - """ - - http_options = ( - _BaseFirestoreRestTransport._BaseListDocuments._get_http_options() - ) - - request, metadata = self._interceptor.pre_list_documents(request, metadata) - transcoded_request = ( - _BaseFirestoreRestTransport._BaseListDocuments._get_transcoded_request( - http_options, request - ) - ) - - # Jsonify the query params - query_params = ( - _BaseFirestoreRestTransport._BaseListDocuments._get_query_params_json( - transcoded_request - ) - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore_v1.FirestoreClient.ListDocuments", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "ListDocuments", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreRestTransport._ListDocuments._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = firestore.ListDocumentsResponse() - pb_resp = firestore.ListDocumentsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_documents(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_documents_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = firestore.ListDocumentsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore_v1.FirestoreClient.list_documents", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "ListDocuments", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _Listen(_BaseFirestoreRestTransport._BaseListen, FirestoreRestStub): - def __hash__(self): - return hash("FirestoreRestTransport.Listen") - - def __call__( - self, - request: firestore.ListenRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> rest_streaming.ResponseIterator: - raise NotImplementedError( - "Method Listen is not available over REST transport" - ) - - class _PartitionQuery( - _BaseFirestoreRestTransport._BasePartitionQuery, FirestoreRestStub - ): - def __hash__(self): - return hash("FirestoreRestTransport.PartitionQuery") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = 
transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__( - self, - request: firestore.PartitionQueryRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> firestore.PartitionQueryResponse: - r"""Call the partition query method over HTTP. - - Args: - request (~.firestore.PartitionQueryRequest): - The request object. The request for - [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.firestore.PartitionQueryResponse: - The response for - [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery]. 
- - """ - - http_options = ( - _BaseFirestoreRestTransport._BasePartitionQuery._get_http_options() - ) - - request, metadata = self._interceptor.pre_partition_query(request, metadata) - transcoded_request = ( - _BaseFirestoreRestTransport._BasePartitionQuery._get_transcoded_request( - http_options, request - ) - ) - - body = ( - _BaseFirestoreRestTransport._BasePartitionQuery._get_request_body_json( - transcoded_request - ) - ) - - # Jsonify the query params - query_params = ( - _BaseFirestoreRestTransport._BasePartitionQuery._get_query_params_json( - transcoded_request - ) - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore_v1.FirestoreClient.PartitionQuery", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "PartitionQuery", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreRestTransport._PartitionQuery._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = firestore.PartitionQueryResponse() - pb_resp = firestore.PartitionQueryResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_partition_query(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_partition_query_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = firestore.PartitionQueryResponse.to_json( - response - ) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore_v1.FirestoreClient.partition_query", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "PartitionQuery", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _Rollback(_BaseFirestoreRestTransport._BaseRollback, FirestoreRestStub): - def __hash__(self): - return hash("FirestoreRestTransport.Rollback") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__( - self, - request: firestore.RollbackRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, 
Union[str, bytes]]] = (), - ): - r"""Call the rollback method over HTTP. - - Args: - request (~.firestore.RollbackRequest): - The request object. The request for - [Firestore.Rollback][google.firestore.v1.Firestore.Rollback]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseFirestoreRestTransport._BaseRollback._get_http_options() - - request, metadata = self._interceptor.pre_rollback(request, metadata) - transcoded_request = ( - _BaseFirestoreRestTransport._BaseRollback._get_transcoded_request( - http_options, request - ) - ) - - body = _BaseFirestoreRestTransport._BaseRollback._get_request_body_json( - transcoded_request - ) - - # Jsonify the query params - query_params = ( - _BaseFirestoreRestTransport._BaseRollback._get_query_params_json( - transcoded_request - ) - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore_v1.FirestoreClient.Rollback", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "Rollback", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreRestTransport._Rollback._get_response( - 
self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _RunAggregationQuery( - _BaseFirestoreRestTransport._BaseRunAggregationQuery, FirestoreRestStub - ): - def __hash__(self): - return hash("FirestoreRestTransport.RunAggregationQuery") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - stream=True, - ) - return response - - def __call__( - self, - request: firestore.RunAggregationQueryRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> rest_streaming.ResponseIterator: - r"""Call the run aggregation query method over HTTP. - - Args: - request (~.firestore.RunAggregationQueryRequest): - The request object. The request for - [Firestore.RunAggregationQuery][google.firestore.v1.Firestore.RunAggregationQuery]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - ~.firestore.RunAggregationQueryResponse: - The response for - [Firestore.RunAggregationQuery][google.firestore.v1.Firestore.RunAggregationQuery]. - - """ - - http_options = ( - _BaseFirestoreRestTransport._BaseRunAggregationQuery._get_http_options() - ) - - request, metadata = self._interceptor.pre_run_aggregation_query( - request, metadata - ) - transcoded_request = _BaseFirestoreRestTransport._BaseRunAggregationQuery._get_transcoded_request( - http_options, request - ) - - body = _BaseFirestoreRestTransport._BaseRunAggregationQuery._get_request_body_json( - transcoded_request - ) - - # Jsonify the query params - query_params = _BaseFirestoreRestTransport._BaseRunAggregationQuery._get_query_params_json( - transcoded_request - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore_v1.FirestoreClient.RunAggregationQuery", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "RunAggregationQuery", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreRestTransport._RunAggregationQuery._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = rest_streaming.ResponseIterator( - response, firestore.RunAggregationQueryResponse - ) - - resp = self._interceptor.post_run_aggregation_query(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_run_aggregation_query_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - http_response = { - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore_v1.FirestoreClient.run_aggregation_query", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "RunAggregationQuery", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _RunQuery(_BaseFirestoreRestTransport._BaseRunQuery, FirestoreRestStub): - def __hash__(self): - return hash("FirestoreRestTransport.RunQuery") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - stream=True, - ) - return response - - def __call__( - self, - request: firestore.RunQueryRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> rest_streaming.ResponseIterator: - r"""Call the run query method over HTTP. - - Args: - request (~.firestore.RunQueryRequest): - The request object. 
The request for - [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.firestore.RunQueryResponse: - The response for - [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery]. - - """ - - http_options = _BaseFirestoreRestTransport._BaseRunQuery._get_http_options() - - request, metadata = self._interceptor.pre_run_query(request, metadata) - transcoded_request = ( - _BaseFirestoreRestTransport._BaseRunQuery._get_transcoded_request( - http_options, request - ) - ) - - body = _BaseFirestoreRestTransport._BaseRunQuery._get_request_body_json( - transcoded_request - ) - - # Jsonify the query params - query_params = ( - _BaseFirestoreRestTransport._BaseRunQuery._get_query_params_json( - transcoded_request - ) - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore_v1.FirestoreClient.RunQuery", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "RunQuery", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreRestTransport._RunQuery._get_response( - self._host, - metadata, - 
query_params, - self._session, - timeout, - transcoded_request, - body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = rest_streaming.ResponseIterator(response, firestore.RunQueryResponse) - - resp = self._interceptor.post_run_query(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_run_query_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - http_response = { - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore_v1.FirestoreClient.run_query", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "RunQuery", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateDocument( - _BaseFirestoreRestTransport._BaseUpdateDocument, FirestoreRestStub - ): - def __hash__(self): - return hash("FirestoreRestTransport.UpdateDocument") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__( - self, - request: firestore.UpdateDocumentRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gf_document.Document: - r"""Call the 
update document method over HTTP. - - Args: - request (~.firestore.UpdateDocumentRequest): - The request object. The request for - [Firestore.UpdateDocument][google.firestore.v1.Firestore.UpdateDocument]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.gf_document.Document: - A Firestore document. - - Must not exceed 1 MiB - 4 bytes. - - """ - - http_options = ( - _BaseFirestoreRestTransport._BaseUpdateDocument._get_http_options() - ) - - request, metadata = self._interceptor.pre_update_document(request, metadata) - transcoded_request = ( - _BaseFirestoreRestTransport._BaseUpdateDocument._get_transcoded_request( - http_options, request - ) - ) - - body = ( - _BaseFirestoreRestTransport._BaseUpdateDocument._get_request_body_json( - transcoded_request - ) - ) - - # Jsonify the query params - query_params = ( - _BaseFirestoreRestTransport._BaseUpdateDocument._get_query_params_json( - transcoded_request - ) - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore_v1.FirestoreClient.UpdateDocument", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "UpdateDocument", - "httpRequest": http_request, - 
"metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreRestTransport._UpdateDocument._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = gf_document.Document() - pb_resp = gf_document.Document.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_document(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_document_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = gf_document.Document.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore_v1.FirestoreClient.update_document", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "UpdateDocument", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _Write(_BaseFirestoreRestTransport._BaseWrite, FirestoreRestStub): - def __hash__(self): - return hash("FirestoreRestTransport.Write") - - def __call__( - self, - request: firestore.WriteRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> rest_streaming.ResponseIterator: - raise NotImplementedError( - "Method Write is not available over REST transport" - ) - - @property - def batch_get_documents( - self, - ) -> Callable[ - 
[firestore.BatchGetDocumentsRequest], firestore.BatchGetDocumentsResponse - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._BatchGetDocuments(self._session, self._host, self._interceptor) # type: ignore - - @property - def batch_write( - self, - ) -> Callable[[firestore.BatchWriteRequest], firestore.BatchWriteResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._BatchWrite(self._session, self._host, self._interceptor) # type: ignore - - @property - def begin_transaction( - self, - ) -> Callable[ - [firestore.BeginTransactionRequest], firestore.BeginTransactionResponse - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._BeginTransaction(self._session, self._host, self._interceptor) # type: ignore - - @property - def commit(self) -> Callable[[firestore.CommitRequest], firestore.CommitResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._Commit(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_document( - self, - ) -> Callable[[firestore.CreateDocumentRequest], document.Document]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateDocument(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_document( - self, - ) -> Callable[[firestore.DeleteDocumentRequest], empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._DeleteDocument(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_document( - self, - ) -> Callable[[firestore.GetDocumentRequest], document.Document]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetDocument(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_collection_ids( - self, - ) -> Callable[ - [firestore.ListCollectionIdsRequest], firestore.ListCollectionIdsResponse - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListCollectionIds(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_documents( - self, - ) -> Callable[[firestore.ListDocumentsRequest], firestore.ListDocumentsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListDocuments(self._session, self._host, self._interceptor) # type: ignore - - @property - def listen(self) -> Callable[[firestore.ListenRequest], firestore.ListenResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._Listen(self._session, self._host, self._interceptor) # type: ignore - - @property - def partition_query( - self, - ) -> Callable[[firestore.PartitionQueryRequest], firestore.PartitionQueryResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._PartitionQuery(self._session, self._host, self._interceptor) # type: ignore - - @property - def rollback(self) -> Callable[[firestore.RollbackRequest], empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._Rollback(self._session, self._host, self._interceptor) # type: ignore - - @property - def run_aggregation_query( - self, - ) -> Callable[ - [firestore.RunAggregationQueryRequest], firestore.RunAggregationQueryResponse - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._RunAggregationQuery(self._session, self._host, self._interceptor) # type: ignore - - @property - def run_query( - self, - ) -> Callable[[firestore.RunQueryRequest], firestore.RunQueryResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._RunQuery(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_document( - self, - ) -> Callable[[firestore.UpdateDocumentRequest], gf_document.Document]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateDocument(self._session, self._host, self._interceptor) # type: ignore - - @property - def write(self) -> Callable[[firestore.WriteRequest], firestore.WriteResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._Write(self._session, self._host, self._interceptor) # type: ignore - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation( - _BaseFirestoreRestTransport._BaseCancelOperation, FirestoreRestStub - ): - def __hash__(self): - return hash("FirestoreRestTransport.CancelOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__( - self, - request: operations_pb2.CancelOperationRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Call the cancel operation method over HTTP. - - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = ( - _BaseFirestoreRestTransport._BaseCancelOperation._get_http_options() - ) - - request, metadata = self._interceptor.pre_cancel_operation( - request, metadata - ) - transcoded_request = _BaseFirestoreRestTransport._BaseCancelOperation._get_transcoded_request( - http_options, request - ) - - body = ( - _BaseFirestoreRestTransport._BaseCancelOperation._get_request_body_json( - transcoded_request - ) - ) - - # Jsonify the query params - query_params = ( - _BaseFirestoreRestTransport._BaseCancelOperation._get_query_params_json( - transcoded_request - ) - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore_v1.FirestoreClient.CancelOperation", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "CancelOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreRestTransport._CancelOperation._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation( - _BaseFirestoreRestTransport._BaseDeleteOperation, FirestoreRestStub - ): - def __hash__(self): - return hash("FirestoreRestTransport.DeleteOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__( - self, - request: operations_pb2.DeleteOperationRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = ( - _BaseFirestoreRestTransport._BaseDeleteOperation._get_http_options() - ) - - request, metadata = self._interceptor.pre_delete_operation( - request, metadata - ) - transcoded_request = _BaseFirestoreRestTransport._BaseDeleteOperation._get_transcoded_request( - http_options, request - ) - - # Jsonify the query params - query_params = ( - _BaseFirestoreRestTransport._BaseDeleteOperation._get_query_params_json( - transcoded_request - ) - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore_v1.FirestoreClient.DeleteOperation", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "DeleteOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreRestTransport._DeleteOperation._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation( - _BaseFirestoreRestTransport._BaseGetOperation, FirestoreRestStub - ): - def __hash__(self): - return hash("FirestoreRestTransport.GetOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__( - self, - request: operations_pb2.GetOperationRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.Operation: Response from GetOperation method. 
- """ - - http_options = ( - _BaseFirestoreRestTransport._BaseGetOperation._get_http_options() - ) - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = ( - _BaseFirestoreRestTransport._BaseGetOperation._get_transcoded_request( - http_options, request - ) - ) - - # Jsonify the query params - query_params = ( - _BaseFirestoreRestTransport._BaseGetOperation._get_query_params_json( - transcoded_request - ) - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore_v1.FirestoreClient.GetOperation", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "GetOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreRestTransport._GetOperation._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.Operation() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore_v1.FirestoreAsyncClient.GetOperation", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "GetOperation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations( - _BaseFirestoreRestTransport._BaseListOperations, FirestoreRestStub - ): - def __hash__(self): - return hash("FirestoreRestTransport.ListOperations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__( - self, - request: operations_pb2.ListOperationsRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. 
- - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. - """ - - http_options = ( - _BaseFirestoreRestTransport._BaseListOperations._get_http_options() - ) - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = ( - _BaseFirestoreRestTransport._BaseListOperations._get_transcoded_request( - http_options, request - ) - ) - - # Jsonify the query params - query_params = ( - _BaseFirestoreRestTransport._BaseListOperations._get_query_params_json( - transcoded_request - ) - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore_v1.FirestoreClient.ListOperations", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "ListOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreRestTransport._ListOperations._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - 
transcoded_request, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore_v1.FirestoreAsyncClient.ListOperations", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "ListOperations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__ = ("FirestoreRestTransport",) diff --git a/google/cloud/firestore_v1/services/firestore/transports/rest_base.py b/google/cloud/firestore_v1/services/firestore/transports/rest_base.py deleted file mode 100644 index 1d95cd16ea..0000000000 --- a/google/cloud/firestore_v1/services/firestore/transports/rest_base.py +++ /dev/null @@ -1,1004 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.cloud.location import locations_pb2 # type: ignore -from .base import FirestoreTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.firestore_v1.types import document -from google.cloud.firestore_v1.types import document as gf_document -from google.cloud.firestore_v1.types import firestore -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - - -class _BaseFirestoreRestTransport(FirestoreTransport): - """Base REST backend transport for Firestore. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__( - self, - *, - host: str = "firestore.googleapis.com", - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = "https", - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'firestore.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. 
- client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError( - f"Unexpected hostname structure: {host}" - ) # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - class _BaseBatchGetDocuments: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{database=projects/*/databases/*}/documents:batchGet", - "body": "*", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore.BatchGetDocumentsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = 
json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) - return body - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreRestTransport._BaseBatchGetDocuments._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseBatchWrite: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{database=projects/*/databases/*}/documents:batchWrite", - "body": "*", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore.BatchWriteRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) - return body - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreRestTransport._BaseBatchWrite._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseBeginTransaction: - def __hash__(self): # 
pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{database=projects/*/databases/*}/documents:beginTransaction", - "body": "*", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore.BeginTransactionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) - return body - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreRestTransport._BaseBeginTransaction._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCommit: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{database=projects/*/databases/*}/documents:commit", - "body": "*", - }, - ] - return http_options - - @staticmethod 
- def _get_transcoded_request(http_options, request): - pb_request = firestore.CommitRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) - return body - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreRestTransport._BaseCommit._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateDocument: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{parent=projects/*/databases/*/documents/**}/{collection_id}", - "body": "document", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore.CreateDocumentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) - return body - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - 
transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreRestTransport._BaseCreateDocument._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteDocument: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "delete", - "uri": "/v1/{name=projects/*/databases/*/documents/*/**}", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore.DeleteDocumentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreRestTransport._BaseDeleteDocument._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetDocument: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": 
"/v1/{name=projects/*/databases/*/documents/*/**}", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore.GetDocumentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreRestTransport._BaseGetDocument._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListCollectionIds: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{parent=projects/*/databases/*/documents}:listCollectionIds", - "body": "*", - }, - { - "method": "post", - "uri": "/v1/{parent=projects/*/databases/*/documents/*/**}:listCollectionIds", - "body": "*", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore.ListCollectionIdsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) - return body - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - 
json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreRestTransport._BaseListCollectionIds._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListDocuments: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{parent=projects/*/databases/*/documents/*/**}/{collection_id}", - }, - { - "method": "get", - "uri": "/v1/{parent=projects/*/databases/*/documents}/{collection_id}", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore.ListDocumentsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreRestTransport._BaseListDocuments._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListen: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - class _BasePartitionQuery: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, 
message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{parent=projects/*/databases/*/documents}:partitionQuery", - "body": "*", - }, - { - "method": "post", - "uri": "/v1/{parent=projects/*/databases/*/documents/*/**}:partitionQuery", - "body": "*", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore.PartitionQueryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) - return body - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreRestTransport._BasePartitionQuery._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseRollback: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{database=projects/*/databases/*}/documents:rollback", - "body": "*", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = 
firestore.RollbackRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) - return body - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreRestTransport._BaseRollback._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseRunAggregationQuery: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{parent=projects/*/databases/*/documents}:runAggregationQuery", - "body": "*", - }, - { - "method": "post", - "uri": "/v1/{parent=projects/*/databases/*/documents/*/**}:runAggregationQuery", - "body": "*", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore.RunAggregationQueryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) - return body - - @staticmethod - def _get_query_params_json(transcoded_request): - 
query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreRestTransport._BaseRunAggregationQuery._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseRunQuery: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{parent=projects/*/databases/*/documents}:runQuery", - "body": "*", - }, - { - "method": "post", - "uri": "/v1/{parent=projects/*/databases/*/documents/*/**}:runQuery", - "body": "*", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore.RunQueryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) - return body - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreRestTransport._BaseRunQuery._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateDocument: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be 
implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "patch", - "uri": "/v1/{document.name=projects/*/databases/*/documents/*/**}", - "body": "document", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore.UpdateDocumentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) - return body - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreRestTransport._BaseUpdateDocument._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseWrite: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - class _BaseCancelOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{name=projects/*/databases/*/operations/*}:cancel", - "body": "*", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, 
**request_kwargs) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request["body"]) - return body - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - return query_params - - class _BaseDeleteOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "delete", - "uri": "/v1/{name=projects/*/databases/*/operations/*}", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - return query_params - - class _BaseGetOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=projects/*/databases/*/operations/*}", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - return query_params - - class _BaseListOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - 
http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=projects/*/databases/*}/operations", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - return query_params - - -__all__ = ("_BaseFirestoreRestTransport",) diff --git a/google/cloud/firestore_v1/types/__init__.py b/google/cloud/firestore_v1/types/__init__.py deleted file mode 100644 index ae1004e132..0000000000 --- a/google/cloud/firestore_v1/types/__init__.py +++ /dev/null @@ -1,140 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .aggregation_result import ( - AggregationResult, -) -from .bloom_filter import ( - BitSequence, - BloomFilter, -) -from .common import ( - DocumentMask, - Precondition, - TransactionOptions, -) -from .document import ( - ArrayValue, - Document, - MapValue, - Value, -) -from .firestore import ( - BatchGetDocumentsRequest, - BatchGetDocumentsResponse, - BatchWriteRequest, - BatchWriteResponse, - BeginTransactionRequest, - BeginTransactionResponse, - CommitRequest, - CommitResponse, - CreateDocumentRequest, - DeleteDocumentRequest, - GetDocumentRequest, - ListCollectionIdsRequest, - ListCollectionIdsResponse, - ListDocumentsRequest, - ListDocumentsResponse, - ListenRequest, - ListenResponse, - PartitionQueryRequest, - PartitionQueryResponse, - RollbackRequest, - RunAggregationQueryRequest, - RunAggregationQueryResponse, - RunQueryRequest, - RunQueryResponse, - Target, - TargetChange, - UpdateDocumentRequest, - WriteRequest, - WriteResponse, -) -from .query import ( - Cursor, - StructuredAggregationQuery, - StructuredQuery, -) -from .query_profile import ( - ExecutionStats, - ExplainMetrics, - ExplainOptions, - PlanSummary, -) -from .write import ( - DocumentChange, - DocumentDelete, - DocumentRemove, - DocumentTransform, - ExistenceFilter, - Write, - WriteResult, -) - -__all__ = ( - "AggregationResult", - "BitSequence", - "BloomFilter", - "DocumentMask", - "Precondition", - "TransactionOptions", - "ArrayValue", - "Document", - "MapValue", - "Value", - "BatchGetDocumentsRequest", - "BatchGetDocumentsResponse", - "BatchWriteRequest", - "BatchWriteResponse", - "BeginTransactionRequest", - "BeginTransactionResponse", - "CommitRequest", - "CommitResponse", - "CreateDocumentRequest", - "DeleteDocumentRequest", - "GetDocumentRequest", - "ListCollectionIdsRequest", - "ListCollectionIdsResponse", - "ListDocumentsRequest", - "ListDocumentsResponse", - "ListenRequest", - "ListenResponse", - "PartitionQueryRequest", - "PartitionQueryResponse", - "RollbackRequest", - 
"RunAggregationQueryRequest", - "RunAggregationQueryResponse", - "RunQueryRequest", - "RunQueryResponse", - "Target", - "TargetChange", - "UpdateDocumentRequest", - "WriteRequest", - "WriteResponse", - "Cursor", - "StructuredAggregationQuery", - "StructuredQuery", - "ExecutionStats", - "ExplainMetrics", - "ExplainOptions", - "PlanSummary", - "DocumentChange", - "DocumentDelete", - "DocumentRemove", - "DocumentTransform", - "ExistenceFilter", - "Write", - "WriteResult", -) diff --git a/google/cloud/firestore_v1/types/aggregation_result.py b/google/cloud/firestore_v1/types/aggregation_result.py deleted file mode 100644 index 3c649dc8a2..0000000000 --- a/google/cloud/firestore_v1/types/aggregation_result.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.firestore_v1.types import document - - -__protobuf__ = proto.module( - package="google.firestore.v1", - manifest={ - "AggregationResult", - }, -) - - -class AggregationResult(proto.Message): - r"""The result of a single bucket from a Firestore aggregation query. - - The keys of ``aggregate_fields`` are the same for all results in an - aggregation query, unlike document queries which can have different - fields present for each result. 
- - Attributes: - aggregate_fields (MutableMapping[str, google.cloud.firestore_v1.types.Value]): - The result of the aggregation functions, ex: - ``COUNT(*) AS total_docs``. - - The key is the - [alias][google.firestore.v1.StructuredAggregationQuery.Aggregation.alias] - assigned to the aggregation function on input and the size - of this map equals the number of aggregation functions in - the query. - """ - - aggregate_fields: MutableMapping[str, document.Value] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=2, - message=document.Value, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_v1/types/bloom_filter.py b/google/cloud/firestore_v1/types/bloom_filter.py deleted file mode 100644 index f38386cbe1..0000000000 --- a/google/cloud/firestore_v1/types/bloom_filter.py +++ /dev/null @@ -1,110 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package="google.firestore.v1", - manifest={ - "BitSequence", - "BloomFilter", - }, -) - - -class BitSequence(proto.Message): - r"""A sequence of bits, encoded in a byte array. - - Each byte in the ``bitmap`` byte array stores 8 bits of the - sequence. The only exception is the last byte, which may store 8 *or - fewer* bits. 
The ``padding`` defines the number of bits of the last - byte to be ignored as "padding". The values of these "padding" bits - are unspecified and must be ignored. - - To retrieve the first bit, bit 0, calculate: - ``(bitmap[0] & 0x01) != 0``. To retrieve the second bit, bit 1, - calculate: ``(bitmap[0] & 0x02) != 0``. To retrieve the third bit, - bit 2, calculate: ``(bitmap[0] & 0x04) != 0``. To retrieve the - fourth bit, bit 3, calculate: ``(bitmap[0] & 0x08) != 0``. To - retrieve bit n, calculate: - ``(bitmap[n / 8] & (0x01 << (n % 8))) != 0``. - - The "size" of a ``BitSequence`` (the number of bits it contains) is - calculated by this formula: ``(bitmap.length * 8) - padding``. - - Attributes: - bitmap (bytes): - The bytes that encode the bit sequence. - May have a length of zero. - padding (int): - The number of bits of the last byte in ``bitmap`` to ignore - as "padding". If the length of ``bitmap`` is zero, then this - value must be ``0``. Otherwise, this value must be between 0 - and 7, inclusive. - """ - - bitmap: bytes = proto.Field( - proto.BYTES, - number=1, - ) - padding: int = proto.Field( - proto.INT32, - number=2, - ) - - -class BloomFilter(proto.Message): - r"""A bloom filter (https://en.wikipedia.org/wiki/Bloom_filter). - - The bloom filter hashes the entries with MD5 and treats the - resulting 128-bit hash as 2 distinct 64-bit hash values, interpreted - as unsigned integers using 2's complement encoding. - - These two hash values, named ``h1`` and ``h2``, are then used to - compute the ``hash_count`` hash values using the formula, starting - at ``i=0``: - - :: - - h(i) = h1 + (i * h2) - - These resulting values are then taken modulo the number of bits in - the bloom filter to get the bits of the bloom filter to test for the - given entry. - - Attributes: - bits (google.cloud.firestore_v1.types.BitSequence): - The bloom filter data. - hash_count (int): - The number of hashes used by the algorithm. 
- """ - - bits: "BitSequence" = proto.Field( - proto.MESSAGE, - number=1, - message="BitSequence", - ) - hash_count: int = proto.Field( - proto.INT32, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_v1/types/common.py b/google/cloud/firestore_v1/types/common.py deleted file mode 100644 index 01fb3d2633..0000000000 --- a/google/cloud/firestore_v1/types/common.py +++ /dev/null @@ -1,172 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package="google.firestore.v1", - manifest={ - "DocumentMask", - "Precondition", - "TransactionOptions", - }, -) - - -class DocumentMask(proto.Message): - r"""A set of field paths on a document. Used to restrict a get or update - operation on a document to a subset of its fields. This is different - from standard field masks, as this is always scoped to a - [Document][google.firestore.v1.Document], and takes in account the - dynamic nature of [Value][google.firestore.v1.Value]. - - Attributes: - field_paths (MutableSequence[str]): - The list of field paths in the mask. See - [Document.fields][google.firestore.v1.Document.fields] for a - field path syntax reference. 
- """ - - field_paths: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - - -class Precondition(proto.Message): - r"""A precondition on a document, used for conditional - operations. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - exists (bool): - When set to ``true``, the target document must exist. When - set to ``false``, the target document must not exist. - - This field is a member of `oneof`_ ``condition_type``. - update_time (google.protobuf.timestamp_pb2.Timestamp): - When set, the target document must exist and - have been last updated at that time. Timestamp - must be microsecond aligned. - - This field is a member of `oneof`_ ``condition_type``. - """ - - exists: bool = proto.Field( - proto.BOOL, - number=1, - oneof="condition_type", - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - oneof="condition_type", - message=timestamp_pb2.Timestamp, - ) - - -class TransactionOptions(proto.Message): - r"""Options for creating a new transaction. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - read_only (google.cloud.firestore_v1.types.TransactionOptions.ReadOnly): - The transaction can only be used for read - operations. - - This field is a member of `oneof`_ ``mode``. - read_write (google.cloud.firestore_v1.types.TransactionOptions.ReadWrite): - The transaction can be used for both read and - write operations. 
- - This field is a member of `oneof`_ ``mode``. - """ - - class ReadWrite(proto.Message): - r"""Options for a transaction that can be used to read and write - documents. - Firestore does not allow 3rd party auth requests to create - read-write. transactions. - - Attributes: - retry_transaction (bytes): - An optional transaction to retry. - """ - - retry_transaction: bytes = proto.Field( - proto.BYTES, - number=1, - ) - - class ReadOnly(proto.Message): - r"""Options for a transaction that can only be used to read - documents. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - read_time (google.protobuf.timestamp_pb2.Timestamp): - Reads documents at the given time. - - This must be a microsecond precision timestamp - within the past one hour, or if Point-in-Time - Recovery is enabled, can additionally be a whole - minute timestamp within the past 7 days. - - This field is a member of `oneof`_ ``consistency_selector``. - """ - - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - oneof="consistency_selector", - message=timestamp_pb2.Timestamp, - ) - - read_only: ReadOnly = proto.Field( - proto.MESSAGE, - number=2, - oneof="mode", - message=ReadOnly, - ) - read_write: ReadWrite = proto.Field( - proto.MESSAGE, - number=3, - oneof="mode", - message=ReadWrite, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_v1/types/document.py b/google/cloud/firestore_v1/types/document.py deleted file mode 100644 index 22fe79b736..0000000000 --- a/google/cloud/firestore_v1/types/document.py +++ /dev/null @@ -1,288 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.type import latlng_pb2 # type: ignore - - -__protobuf__ = proto.module( - package="google.firestore.v1", - manifest={ - "Document", - "Value", - "ArrayValue", - "MapValue", - }, -) - - -class Document(proto.Message): - r"""A Firestore document. - - Must not exceed 1 MiB - 4 bytes. - - Attributes: - name (str): - The resource name of the document, for example - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - fields (MutableMapping[str, google.cloud.firestore_v1.types.Value]): - The document's fields. - - The map keys represent field names. - - Field names matching the regular expression ``__.*__`` are - reserved. Reserved field names are forbidden except in - certain documented contexts. The field names, represented as - UTF-8, must not exceed 1,500 bytes and cannot be empty. - - Field paths may be used in other contexts to refer to - structured fields defined here. For ``map_value``, the field - path is represented by a dot-delimited (``.``) string of - segments. Each segment is either a simple field name - (defined below) or a quoted field name. For example, the - structured field - ``"foo" : { map_value: { "x&y" : { string_value: "hello" }}}`` - would be represented by the field path - :literal:`foo.`x&y\``. 
- - A simple field name contains only characters ``a`` to ``z``, - ``A`` to ``Z``, ``0`` to ``9``, or ``_``, and must not start - with ``0`` to ``9``. For example, ``foo_bar_17``. - - A quoted field name starts and ends with :literal:`\`` and - may contain any character. Some characters, including - :literal:`\``, must be escaped using a ``\``. For example, - :literal:`\`x&y\`` represents ``x&y`` and - :literal:`\`bak\\`tik\`` represents :literal:`bak`tik`. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time at which the document was created. - - This value increases monotonically when a document is - deleted then recreated. It can also be compared to values - from other documents and the ``read_time`` of a query. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time at which the document was last - changed. - - This value is initially set to the ``create_time`` then - increases monotonically with each change to the document. It - can also be compared to values from other documents and the - ``read_time`` of a query. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - fields: MutableMapping[str, "Value"] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=2, - message="Value", - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - - -class Value(proto.Message): - r"""A message that can hold any of the supported value types. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - null_value (google.protobuf.struct_pb2.NullValue): - A null value. - - This field is a member of `oneof`_ ``value_type``. - boolean_value (bool): - A boolean value. - - This field is a member of `oneof`_ ``value_type``. - integer_value (int): - An integer value. - - This field is a member of `oneof`_ ``value_type``. - double_value (float): - A double value. - - This field is a member of `oneof`_ ``value_type``. - timestamp_value (google.protobuf.timestamp_pb2.Timestamp): - A timestamp value. - - Precise only to microseconds. When stored, any - additional precision is rounded down. - - This field is a member of `oneof`_ ``value_type``. - string_value (str): - A string value. - - The string, represented as UTF-8, must not - exceed 1 MiB - 89 bytes. Only the first 1,500 - bytes of the UTF-8 representation are considered - by queries. - - This field is a member of `oneof`_ ``value_type``. - bytes_value (bytes): - A bytes value. - - Must not exceed 1 MiB - 89 bytes. - Only the first 1,500 bytes are considered by - queries. - - This field is a member of `oneof`_ ``value_type``. - reference_value (str): - A reference to a document. For example: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - - This field is a member of `oneof`_ ``value_type``. - geo_point_value (google.type.latlng_pb2.LatLng): - A geo point value representing a point on the - surface of Earth. - - This field is a member of `oneof`_ ``value_type``. - array_value (google.cloud.firestore_v1.types.ArrayValue): - An array value. - - Cannot directly contain another array value, - though can contain a map which contains another - array. - - This field is a member of `oneof`_ ``value_type``. - map_value (google.cloud.firestore_v1.types.MapValue): - A map value. - - This field is a member of `oneof`_ ``value_type``. 
- """ - - null_value: struct_pb2.NullValue = proto.Field( - proto.ENUM, - number=11, - oneof="value_type", - enum=struct_pb2.NullValue, - ) - boolean_value: bool = proto.Field( - proto.BOOL, - number=1, - oneof="value_type", - ) - integer_value: int = proto.Field( - proto.INT64, - number=2, - oneof="value_type", - ) - double_value: float = proto.Field( - proto.DOUBLE, - number=3, - oneof="value_type", - ) - timestamp_value: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=10, - oneof="value_type", - message=timestamp_pb2.Timestamp, - ) - string_value: str = proto.Field( - proto.STRING, - number=17, - oneof="value_type", - ) - bytes_value: bytes = proto.Field( - proto.BYTES, - number=18, - oneof="value_type", - ) - reference_value: str = proto.Field( - proto.STRING, - number=5, - oneof="value_type", - ) - geo_point_value: latlng_pb2.LatLng = proto.Field( - proto.MESSAGE, - number=8, - oneof="value_type", - message=latlng_pb2.LatLng, - ) - array_value: "ArrayValue" = proto.Field( - proto.MESSAGE, - number=9, - oneof="value_type", - message="ArrayValue", - ) - map_value: "MapValue" = proto.Field( - proto.MESSAGE, - number=6, - oneof="value_type", - message="MapValue", - ) - - -class ArrayValue(proto.Message): - r"""An array value. - - Attributes: - values (MutableSequence[google.cloud.firestore_v1.types.Value]): - Values in the array. - """ - - values: MutableSequence["Value"] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="Value", - ) - - -class MapValue(proto.Message): - r"""A map value. - - Attributes: - fields (MutableMapping[str, google.cloud.firestore_v1.types.Value]): - The map's fields. - - The map keys represent field names. Field names matching the - regular expression ``__.*__`` are reserved. Reserved field - names are forbidden except in certain documented contexts. - The map keys, represented as UTF-8, must not exceed 1,500 - bytes and cannot be empty. 
- """ - - fields: MutableMapping[str, "Value"] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=1, - message="Value", - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_v1/types/firestore.py b/google/cloud/firestore_v1/types/firestore.py deleted file mode 100644 index 190f55d282..0000000000 --- a/google/cloud/firestore_v1/types/firestore.py +++ /dev/null @@ -1,1759 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.firestore_v1.types import aggregation_result -from google.cloud.firestore_v1.types import common -from google.cloud.firestore_v1.types import document as gf_document -from google.cloud.firestore_v1.types import query as gf_query -from google.cloud.firestore_v1.types import query_profile -from google.cloud.firestore_v1.types import write -from google.protobuf import timestamp_pb2 # type: ignore -from google.protobuf import wrappers_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore - - -__protobuf__ = proto.module( - package="google.firestore.v1", - manifest={ - "GetDocumentRequest", - "ListDocumentsRequest", - "ListDocumentsResponse", - "CreateDocumentRequest", - "UpdateDocumentRequest", - "DeleteDocumentRequest", - "BatchGetDocumentsRequest", - "BatchGetDocumentsResponse", - "BeginTransactionRequest", - "BeginTransactionResponse", - "CommitRequest", - "CommitResponse", - "RollbackRequest", - "RunQueryRequest", - "RunQueryResponse", - "RunAggregationQueryRequest", - "RunAggregationQueryResponse", - "PartitionQueryRequest", - "PartitionQueryResponse", - "WriteRequest", - "WriteResponse", - "ListenRequest", - "ListenResponse", - "Target", - "TargetChange", - "ListCollectionIdsRequest", - "ListCollectionIdsResponse", - "BatchWriteRequest", - "BatchWriteResponse", - }, -) - - -class GetDocumentRequest(proto.Message): - r"""The request for - [Firestore.GetDocument][google.firestore.v1.Firestore.GetDocument]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Required. The resource name of the Document to get. 
In the - format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - mask (google.cloud.firestore_v1.types.DocumentMask): - The fields to return. If not set, returns all - fields. - If the document has a field that is not present - in this mask, that field will not be returned in - the response. - transaction (bytes): - Reads the document in a transaction. - - This field is a member of `oneof`_ ``consistency_selector``. - read_time (google.protobuf.timestamp_pb2.Timestamp): - Reads the version of the document at the - given time. - This must be a microsecond precision timestamp - within the past one hour, or if Point-in-Time - Recovery is enabled, can additionally be a whole - minute timestamp within the past 7 days. - - This field is a member of `oneof`_ ``consistency_selector``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - mask: common.DocumentMask = proto.Field( - proto.MESSAGE, - number=2, - message=common.DocumentMask, - ) - transaction: bytes = proto.Field( - proto.BYTES, - number=3, - oneof="consistency_selector", - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - oneof="consistency_selector", - message=timestamp_pb2.Timestamp, - ) - - -class ListDocumentsRequest(proto.Message): - r"""The request for - [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - parent (str): - Required. The parent resource name. In the format: - ``projects/{project_id}/databases/{database_id}/documents`` - or - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. 
- - For example: - ``projects/my-project/databases/my-database/documents`` or - ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`` - collection_id (str): - Optional. The collection ID, relative to ``parent``, to - list. - - For example: ``chatrooms`` or ``messages``. - - This is optional, and when not provided, Firestore will list - documents from all collections under the provided - ``parent``. - page_size (int): - Optional. The maximum number of documents to - return in a single response. - Firestore may return fewer than this value. - page_token (str): - Optional. A page token, received from a previous - ``ListDocuments`` response. - - Provide this to retrieve the subsequent page. When - paginating, all other parameters (with the exception of - ``page_size``) must match the values set in the request that - generated the page token. - order_by (str): - Optional. The optional ordering of the documents to return. - - For example: ``priority desc, __name__ desc``. - - This mirrors the - [``ORDER BY``][google.firestore.v1.StructuredQuery.order_by] - used in Firestore queries but in a string representation. - When absent, documents are ordered based on - ``__name__ ASC``. - mask (google.cloud.firestore_v1.types.DocumentMask): - Optional. The fields to return. If not set, - returns all fields. - If a document has a field that is not present in - this mask, that field will not be returned in - the response. - transaction (bytes): - Perform the read as part of an already active - transaction. - - This field is a member of `oneof`_ ``consistency_selector``. - read_time (google.protobuf.timestamp_pb2.Timestamp): - Perform the read at the provided time. - - This must be a microsecond precision timestamp - within the past one hour, or if Point-in-Time - Recovery is enabled, can additionally be a whole - minute timestamp within the past 7 days. - - This field is a member of `oneof`_ ``consistency_selector``. 
- show_missing (bool): - If the list should show missing documents. - - A document is missing if it does not exist, but there are - sub-documents nested underneath it. When true, such missing - documents will be returned with a key but will not have - fields, - [``create_time``][google.firestore.v1.Document.create_time], - or - [``update_time``][google.firestore.v1.Document.update_time] - set. - - Requests with ``show_missing`` may not specify ``where`` or - ``order_by``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - collection_id: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=6, - ) - mask: common.DocumentMask = proto.Field( - proto.MESSAGE, - number=7, - message=common.DocumentMask, - ) - transaction: bytes = proto.Field( - proto.BYTES, - number=8, - oneof="consistency_selector", - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=10, - oneof="consistency_selector", - message=timestamp_pb2.Timestamp, - ) - show_missing: bool = proto.Field( - proto.BOOL, - number=12, - ) - - -class ListDocumentsResponse(proto.Message): - r"""The response for - [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments]. - - Attributes: - documents (MutableSequence[google.cloud.firestore_v1.types.Document]): - The Documents found. - next_page_token (str): - A token to retrieve the next page of - documents. - If this field is omitted, there are no - subsequent pages. 
- """ - - @property - def raw_page(self): - return self - - documents: MutableSequence[gf_document.Document] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gf_document.Document, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CreateDocumentRequest(proto.Message): - r"""The request for - [Firestore.CreateDocument][google.firestore.v1.Firestore.CreateDocument]. - - Attributes: - parent (str): - Required. The parent resource. For example: - ``projects/{project_id}/databases/{database_id}/documents`` - or - ``projects/{project_id}/databases/{database_id}/documents/chatrooms/{chatroom_id}`` - collection_id (str): - Required. The collection ID, relative to ``parent``, to - list. For example: ``chatrooms``. - document_id (str): - The client-assigned document ID to use for - this document. - Optional. If not specified, an ID will be - assigned by the service. - document (google.cloud.firestore_v1.types.Document): - Required. The document to create. ``name`` must not be set. - mask (google.cloud.firestore_v1.types.DocumentMask): - The fields to return. If not set, returns all - fields. - If the document has a field that is not present - in this mask, that field will not be returned in - the response. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - collection_id: str = proto.Field( - proto.STRING, - number=2, - ) - document_id: str = proto.Field( - proto.STRING, - number=3, - ) - document: gf_document.Document = proto.Field( - proto.MESSAGE, - number=4, - message=gf_document.Document, - ) - mask: common.DocumentMask = proto.Field( - proto.MESSAGE, - number=5, - message=common.DocumentMask, - ) - - -class UpdateDocumentRequest(proto.Message): - r"""The request for - [Firestore.UpdateDocument][google.firestore.v1.Firestore.UpdateDocument]. - - Attributes: - document (google.cloud.firestore_v1.types.Document): - Required. The updated document. - Creates the document if it does not already - exist. 
- update_mask (google.cloud.firestore_v1.types.DocumentMask): - The fields to update. - None of the field paths in the mask may contain - a reserved name. - - If the document exists on the server and has - fields not referenced in the mask, they are left - unchanged. - Fields referenced in the mask, but not present - in the input document, are deleted from the - document on the server. - mask (google.cloud.firestore_v1.types.DocumentMask): - The fields to return. If not set, returns all - fields. - If the document has a field that is not present - in this mask, that field will not be returned in - the response. - current_document (google.cloud.firestore_v1.types.Precondition): - An optional precondition on the document. - The request will fail if this is set and not met - by the target document. - """ - - document: gf_document.Document = proto.Field( - proto.MESSAGE, - number=1, - message=gf_document.Document, - ) - update_mask: common.DocumentMask = proto.Field( - proto.MESSAGE, - number=2, - message=common.DocumentMask, - ) - mask: common.DocumentMask = proto.Field( - proto.MESSAGE, - number=3, - message=common.DocumentMask, - ) - current_document: common.Precondition = proto.Field( - proto.MESSAGE, - number=4, - message=common.Precondition, - ) - - -class DeleteDocumentRequest(proto.Message): - r"""The request for - [Firestore.DeleteDocument][google.firestore.v1.Firestore.DeleteDocument]. - - Attributes: - name (str): - Required. The resource name of the Document to delete. In - the format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - current_document (google.cloud.firestore_v1.types.Precondition): - An optional precondition on the document. - The request will fail if this is set and not met - by the target document. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - current_document: common.Precondition = proto.Field( - proto.MESSAGE, - number=2, - message=common.Precondition, - ) - - -class BatchGetDocumentsRequest(proto.Message): - r"""The request for - [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - database (str): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - documents (MutableSequence[str]): - The names of the documents to retrieve. In the format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - The request will fail if any of the document is not a child - resource of the given ``database``. Duplicate names will be - elided. - mask (google.cloud.firestore_v1.types.DocumentMask): - The fields to return. If not set, returns all - fields. - If a document has a field that is not present in - this mask, that field will not be returned in - the response. - transaction (bytes): - Reads documents in a transaction. - - This field is a member of `oneof`_ ``consistency_selector``. - new_transaction (google.cloud.firestore_v1.types.TransactionOptions): - Starts a new transaction and reads the - documents. Defaults to a read-only transaction. - The new transaction ID will be returned as the - first response in the stream. - - This field is a member of `oneof`_ ``consistency_selector``. - read_time (google.protobuf.timestamp_pb2.Timestamp): - Reads documents as they were at the given - time. 
- This must be a microsecond precision timestamp - within the past one hour, or if Point-in-Time - Recovery is enabled, can additionally be a whole - minute timestamp within the past 7 days. - - This field is a member of `oneof`_ ``consistency_selector``. - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - documents: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - mask: common.DocumentMask = proto.Field( - proto.MESSAGE, - number=3, - message=common.DocumentMask, - ) - transaction: bytes = proto.Field( - proto.BYTES, - number=4, - oneof="consistency_selector", - ) - new_transaction: common.TransactionOptions = proto.Field( - proto.MESSAGE, - number=5, - oneof="consistency_selector", - message=common.TransactionOptions, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - oneof="consistency_selector", - message=timestamp_pb2.Timestamp, - ) - - -class BatchGetDocumentsResponse(proto.Message): - r"""The streamed response for - [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - found (google.cloud.firestore_v1.types.Document): - A document that was requested. - - This field is a member of `oneof`_ ``result``. - missing (str): - A document name that was requested but does not exist. In - the format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - - This field is a member of `oneof`_ ``result``. - transaction (bytes): - The transaction that was started as part of this request. 
- Will only be set in the first response, and only if - [BatchGetDocumentsRequest.new_transaction][google.firestore.v1.BatchGetDocumentsRequest.new_transaction] - was set in the request. - read_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which the document was read. This may be - monotically increasing, in this case the previous documents - in the result stream are guaranteed not to have changed - between their read_time and this one. - """ - - found: gf_document.Document = proto.Field( - proto.MESSAGE, - number=1, - oneof="result", - message=gf_document.Document, - ) - missing: str = proto.Field( - proto.STRING, - number=2, - oneof="result", - ) - transaction: bytes = proto.Field( - proto.BYTES, - number=3, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - - -class BeginTransactionRequest(proto.Message): - r"""The request for - [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction]. - - Attributes: - database (str): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - options (google.cloud.firestore_v1.types.TransactionOptions): - The options for the transaction. - Defaults to a read-write transaction. - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - options: common.TransactionOptions = proto.Field( - proto.MESSAGE, - number=2, - message=common.TransactionOptions, - ) - - -class BeginTransactionResponse(proto.Message): - r"""The response for - [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction]. - - Attributes: - transaction (bytes): - The transaction that was started. - """ - - transaction: bytes = proto.Field( - proto.BYTES, - number=1, - ) - - -class CommitRequest(proto.Message): - r"""The request for - [Firestore.Commit][google.firestore.v1.Firestore.Commit]. - - Attributes: - database (str): - Required. The database name. 
In the format: - ``projects/{project_id}/databases/{database_id}``. - writes (MutableSequence[google.cloud.firestore_v1.types.Write]): - The writes to apply. - - Always executed atomically and in order. - transaction (bytes): - If set, applies all writes in this - transaction, and commits it. - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - writes: MutableSequence[write.Write] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=write.Write, - ) - transaction: bytes = proto.Field( - proto.BYTES, - number=3, - ) - - -class CommitResponse(proto.Message): - r"""The response for - [Firestore.Commit][google.firestore.v1.Firestore.Commit]. - - Attributes: - write_results (MutableSequence[google.cloud.firestore_v1.types.WriteResult]): - The result of applying the writes. - - This i-th write result corresponds to the i-th - write in the request. - commit_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which the commit occurred. Any read with an - equal or greater ``read_time`` is guaranteed to see the - effects of the commit. - """ - - write_results: MutableSequence[write.WriteResult] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=write.WriteResult, - ) - commit_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - - -class RollbackRequest(proto.Message): - r"""The request for - [Firestore.Rollback][google.firestore.v1.Firestore.Rollback]. - - Attributes: - database (str): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - transaction (bytes): - Required. The transaction to roll back. - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - transaction: bytes = proto.Field( - proto.BYTES, - number=2, - ) - - -class RunQueryRequest(proto.Message): - r"""The request for - [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery]. 
- - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - parent (str): - Required. The parent resource name. In the format: - ``projects/{project_id}/databases/{database_id}/documents`` - or - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - For example: - ``projects/my-project/databases/my-database/documents`` or - ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`` - structured_query (google.cloud.firestore_v1.types.StructuredQuery): - A structured query. - - This field is a member of `oneof`_ ``query_type``. - transaction (bytes): - Run the query within an already active - transaction. - The value here is the opaque transaction ID to - execute the query in. - - This field is a member of `oneof`_ ``consistency_selector``. - new_transaction (google.cloud.firestore_v1.types.TransactionOptions): - Starts a new transaction and reads the - documents. Defaults to a read-only transaction. - The new transaction ID will be returned as the - first response in the stream. - - This field is a member of `oneof`_ ``consistency_selector``. - read_time (google.protobuf.timestamp_pb2.Timestamp): - Reads documents as they were at the given - time. - This must be a microsecond precision timestamp - within the past one hour, or if Point-in-Time - Recovery is enabled, can additionally be a whole - minute timestamp within the past 7 days. - - This field is a member of `oneof`_ ``consistency_selector``. - explain_options (google.cloud.firestore_v1.types.ExplainOptions): - Optional. Explain options for the query. If - set, additional query statistics will be - returned. If not, only query results will be - returned. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - structured_query: gf_query.StructuredQuery = proto.Field( - proto.MESSAGE, - number=2, - oneof="query_type", - message=gf_query.StructuredQuery, - ) - transaction: bytes = proto.Field( - proto.BYTES, - number=5, - oneof="consistency_selector", - ) - new_transaction: common.TransactionOptions = proto.Field( - proto.MESSAGE, - number=6, - oneof="consistency_selector", - message=common.TransactionOptions, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - oneof="consistency_selector", - message=timestamp_pb2.Timestamp, - ) - explain_options: query_profile.ExplainOptions = proto.Field( - proto.MESSAGE, - number=10, - message=query_profile.ExplainOptions, - ) - - -class RunQueryResponse(proto.Message): - r"""The response for - [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery]. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - transaction (bytes): - The transaction that was started as part of this request. - Can only be set in the first response, and only if - [RunQueryRequest.new_transaction][google.firestore.v1.RunQueryRequest.new_transaction] - was set in the request. If set, no other fields will be set - in this response. - document (google.cloud.firestore_v1.types.Document): - A query result, not set when reporting - partial progress. - read_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which the document was read. This may be - monotonically increasing; in this case, the previous - documents in the result stream are guaranteed not to have - changed between their ``read_time`` and this one. - - If the query returns no results, a response with - ``read_time`` and no ``document`` will be sent, and this - represents the time at which the query was run. 
- skipped_results (int): - The number of results that have been skipped - due to an offset between the last response and - the current response. - done (bool): - If present, Firestore has completely finished - the request and no more documents will be - returned. - - This field is a member of `oneof`_ ``continuation_selector``. - explain_metrics (google.cloud.firestore_v1.types.ExplainMetrics): - Query explain metrics. This is only present when the - [RunQueryRequest.explain_options][google.firestore.v1.RunQueryRequest.explain_options] - is provided, and it is sent only once with the last response - in the stream. - """ - - transaction: bytes = proto.Field( - proto.BYTES, - number=2, - ) - document: gf_document.Document = proto.Field( - proto.MESSAGE, - number=1, - message=gf_document.Document, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - skipped_results: int = proto.Field( - proto.INT32, - number=4, - ) - done: bool = proto.Field( - proto.BOOL, - number=6, - oneof="continuation_selector", - ) - explain_metrics: query_profile.ExplainMetrics = proto.Field( - proto.MESSAGE, - number=11, - message=query_profile.ExplainMetrics, - ) - - -class RunAggregationQueryRequest(proto.Message): - r"""The request for - [Firestore.RunAggregationQuery][google.firestore.v1.Firestore.RunAggregationQuery]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - parent (str): - Required. The parent resource name. In the format: - ``projects/{project_id}/databases/{database_id}/documents`` - or - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. 
- For example: - ``projects/my-project/databases/my-database/documents`` or - ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`` - structured_aggregation_query (google.cloud.firestore_v1.types.StructuredAggregationQuery): - An aggregation query. - - This field is a member of `oneof`_ ``query_type``. - transaction (bytes): - Run the aggregation within an already active - transaction. - The value here is the opaque transaction ID to - execute the query in. - - This field is a member of `oneof`_ ``consistency_selector``. - new_transaction (google.cloud.firestore_v1.types.TransactionOptions): - Starts a new transaction as part of the - query, defaulting to read-only. - The new transaction ID will be returned as the - first response in the stream. - - This field is a member of `oneof`_ ``consistency_selector``. - read_time (google.protobuf.timestamp_pb2.Timestamp): - Executes the query at the given timestamp. - - This must be a microsecond precision timestamp - within the past one hour, or if Point-in-Time - Recovery is enabled, can additionally be a whole - minute timestamp within the past 7 days. - - This field is a member of `oneof`_ ``consistency_selector``. - explain_options (google.cloud.firestore_v1.types.ExplainOptions): - Optional. Explain options for the query. If - set, additional query statistics will be - returned. If not, only query results will be - returned. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - structured_aggregation_query: gf_query.StructuredAggregationQuery = proto.Field( - proto.MESSAGE, - number=2, - oneof="query_type", - message=gf_query.StructuredAggregationQuery, - ) - transaction: bytes = proto.Field( - proto.BYTES, - number=4, - oneof="consistency_selector", - ) - new_transaction: common.TransactionOptions = proto.Field( - proto.MESSAGE, - number=5, - oneof="consistency_selector", - message=common.TransactionOptions, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - oneof="consistency_selector", - message=timestamp_pb2.Timestamp, - ) - explain_options: query_profile.ExplainOptions = proto.Field( - proto.MESSAGE, - number=8, - message=query_profile.ExplainOptions, - ) - - -class RunAggregationQueryResponse(proto.Message): - r"""The response for - [Firestore.RunAggregationQuery][google.firestore.v1.Firestore.RunAggregationQuery]. - - Attributes: - result (google.cloud.firestore_v1.types.AggregationResult): - A single aggregation result. - - Not present when reporting partial progress. - transaction (bytes): - The transaction that was started as part of - this request. - Only present on the first response when the - request requested to start a new transaction. - read_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which the aggregate result was computed. This is - always monotonically increasing; in this case, the previous - AggregationResult in the result stream are guaranteed not to - have changed between their ``read_time`` and this one. - - If the query returns no results, a response with - ``read_time`` and no ``result`` will be sent, and this - represents the time at which the query was run. - explain_metrics (google.cloud.firestore_v1.types.ExplainMetrics): - Query explain metrics. 
This is only present when the - [RunAggregationQueryRequest.explain_options][google.firestore.v1.RunAggregationQueryRequest.explain_options] - is provided, and it is sent only once with the last response - in the stream. - """ - - result: aggregation_result.AggregationResult = proto.Field( - proto.MESSAGE, - number=1, - message=aggregation_result.AggregationResult, - ) - transaction: bytes = proto.Field( - proto.BYTES, - number=2, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - explain_metrics: query_profile.ExplainMetrics = proto.Field( - proto.MESSAGE, - number=10, - message=query_profile.ExplainMetrics, - ) - - -class PartitionQueryRequest(proto.Message): - r"""The request for - [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery]. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - parent (str): - Required. The parent resource name. In the format: - ``projects/{project_id}/databases/{database_id}/documents``. - Document resource names are not supported; only database - resource names can be specified. - structured_query (google.cloud.firestore_v1.types.StructuredQuery): - A structured query. - Query must specify collection with all - descendants and be ordered by name ascending. - Other filters, order bys, limits, offsets, and - start/end cursors are not supported. - - This field is a member of `oneof`_ ``query_type``. - partition_count (int): - The desired maximum number of partition - points. The partitions may be returned across - multiple pages of results. The number must be - positive. The actual number of partitions - returned may be fewer. - - For example, this may be set to one fewer than - the number of parallel queries to be run, or in - running a data pipeline job, one fewer than the - number of workers or compute instances - available. 
- page_token (str): - The ``next_page_token`` value returned from a previous call - to PartitionQuery that may be used to get an additional set - of results. There are no ordering guarantees between sets of - results. Thus, using multiple sets of results will require - merging the different result sets. - - For example, two subsequent calls using a page_token may - return: - - - cursor B, cursor M, cursor Q - - cursor A, cursor U, cursor W - - To obtain a complete result set ordered with respect to the - results of the query supplied to PartitionQuery, the results - sets should be merged: cursor A, cursor B, cursor M, cursor - Q, cursor U, cursor W - page_size (int): - The maximum number of partitions to return in this call, - subject to ``partition_count``. - - For example, if ``partition_count`` = 10 and ``page_size`` = - 8, the first call to PartitionQuery will return up to 8 - partitions and a ``next_page_token`` if more results exist. - A second call to PartitionQuery will return up to 2 - partitions, to complete the total of 10 specified in - ``partition_count``. - read_time (google.protobuf.timestamp_pb2.Timestamp): - Reads documents as they were at the given - time. - This must be a microsecond precision timestamp - within the past one hour, or if Point-in-Time - Recovery is enabled, can additionally be a whole - minute timestamp within the past 7 days. - - This field is a member of `oneof`_ ``consistency_selector``. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - structured_query: gf_query.StructuredQuery = proto.Field( - proto.MESSAGE, - number=2, - oneof="query_type", - message=gf_query.StructuredQuery, - ) - partition_count: int = proto.Field( - proto.INT64, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - page_size: int = proto.Field( - proto.INT32, - number=5, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - oneof="consistency_selector", - message=timestamp_pb2.Timestamp, - ) - - -class PartitionQueryResponse(proto.Message): - r"""The response for - [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery]. - - Attributes: - partitions (MutableSequence[google.cloud.firestore_v1.types.Cursor]): - Partition results. Each partition is a split point that can - be used by RunQuery as a starting or end point for the query - results. The RunQuery requests must be made with the same - query supplied to this PartitionQuery request. The partition - cursors will be ordered according to same ordering as the - results of the query supplied to PartitionQuery. - - For example, if a PartitionQuery request returns partition - cursors A and B, running the following three queries will - return the entire result set of the original query: - - - query, end_at A - - query, start_at A, end_at B - - query, start_at B - - An empty result may indicate that the query has too few - results to be partitioned, or that the query is not yet - supported for partitioning. - next_page_token (str): - A page token that may be used to request an additional set - of results, up to the number specified by - ``partition_count`` in the PartitionQuery request. If blank, - there are no more results. 
- """ - - @property - def raw_page(self): - return self - - partitions: MutableSequence[gf_query.Cursor] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gf_query.Cursor, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class WriteRequest(proto.Message): - r"""The request for - [Firestore.Write][google.firestore.v1.Firestore.Write]. - - The first request creates a stream, or resumes an existing one from - a token. - - When creating a new stream, the server replies with a response - containing only an ID and a token, to use in the next request. - - When resuming a stream, the server first streams any responses later - than the given token, then a response containing only an up-to-date - token, to use in the next request. - - Attributes: - database (str): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. This is - only required in the first message. - stream_id (str): - The ID of the write stream to resume. - This may only be set in the first message. When - left empty, a new write stream will be created. - writes (MutableSequence[google.cloud.firestore_v1.types.Write]): - The writes to apply. - - Always executed atomically and in order. - This must be empty on the first request. - This may be empty on the last request. - This must not be empty on all other requests. - stream_token (bytes): - A stream token that was previously sent by the server. - - The client should set this field to the token from the most - recent [WriteResponse][google.firestore.v1.WriteResponse] it - has received. This acknowledges that the client has received - responses up to this token. After sending this token, - earlier tokens may not be used anymore. - - The server may close the stream if there are too many - unacknowledged responses. - - Leave this field unset when creating a new stream. To resume - a stream at a specific point, set this field and the - ``stream_id`` field. 
- - Leave this field unset when creating a new stream. - labels (MutableMapping[str, str]): - Labels associated with this write request. - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - stream_id: str = proto.Field( - proto.STRING, - number=2, - ) - writes: MutableSequence[write.Write] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=write.Write, - ) - stream_token: bytes = proto.Field( - proto.BYTES, - number=4, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=5, - ) - - -class WriteResponse(proto.Message): - r"""The response for - [Firestore.Write][google.firestore.v1.Firestore.Write]. - - Attributes: - stream_id (str): - The ID of the stream. - Only set on the first message, when a new stream - was created. - stream_token (bytes): - A token that represents the position of this - response in the stream. This can be used by a - client to resume the stream at this point. - - This field is always set. - write_results (MutableSequence[google.cloud.firestore_v1.types.WriteResult]): - The result of applying the writes. - - This i-th write result corresponds to the i-th - write in the request. - commit_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which the commit occurred. Any read with an - equal or greater ``read_time`` is guaranteed to see the - effects of the write. - """ - - stream_id: str = proto.Field( - proto.STRING, - number=1, - ) - stream_token: bytes = proto.Field( - proto.BYTES, - number=2, - ) - write_results: MutableSequence[write.WriteResult] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=write.WriteResult, - ) - commit_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - - -class ListenRequest(proto.Message): - r"""A request for - [Firestore.Listen][google.firestore.v1.Firestore.Listen] - - This message has `oneof`_ fields (mutually exclusive fields). 
- For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - database (str): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - add_target (google.cloud.firestore_v1.types.Target): - A target to add to this stream. - - This field is a member of `oneof`_ ``target_change``. - remove_target (int): - The ID of a target to remove from this - stream. - - This field is a member of `oneof`_ ``target_change``. - labels (MutableMapping[str, str]): - Labels associated with this target change. - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - add_target: "Target" = proto.Field( - proto.MESSAGE, - number=2, - oneof="target_change", - message="Target", - ) - remove_target: int = proto.Field( - proto.INT32, - number=3, - oneof="target_change", - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=4, - ) - - -class ListenResponse(proto.Message): - r"""The response for - [Firestore.Listen][google.firestore.v1.Firestore.Listen]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - target_change (google.cloud.firestore_v1.types.TargetChange): - Targets have changed. - - This field is a member of `oneof`_ ``response_type``. - document_change (google.cloud.firestore_v1.types.DocumentChange): - A [Document][google.firestore.v1.Document] has changed. - - This field is a member of `oneof`_ ``response_type``. 
- document_delete (google.cloud.firestore_v1.types.DocumentDelete): - A [Document][google.firestore.v1.Document] has been deleted. - - This field is a member of `oneof`_ ``response_type``. - document_remove (google.cloud.firestore_v1.types.DocumentRemove): - A [Document][google.firestore.v1.Document] has been removed - from a target (because it is no longer relevant to that - target). - - This field is a member of `oneof`_ ``response_type``. - filter (google.cloud.firestore_v1.types.ExistenceFilter): - A filter to apply to the set of documents - previously returned for the given target. - - Returned when documents may have been removed - from the given target, but the exact documents - are unknown. - - This field is a member of `oneof`_ ``response_type``. - """ - - target_change: "TargetChange" = proto.Field( - proto.MESSAGE, - number=2, - oneof="response_type", - message="TargetChange", - ) - document_change: write.DocumentChange = proto.Field( - proto.MESSAGE, - number=3, - oneof="response_type", - message=write.DocumentChange, - ) - document_delete: write.DocumentDelete = proto.Field( - proto.MESSAGE, - number=4, - oneof="response_type", - message=write.DocumentDelete, - ) - document_remove: write.DocumentRemove = proto.Field( - proto.MESSAGE, - number=6, - oneof="response_type", - message=write.DocumentRemove, - ) - filter: write.ExistenceFilter = proto.Field( - proto.MESSAGE, - number=5, - oneof="response_type", - message=write.ExistenceFilter, - ) - - -class Target(proto.Message): - r"""A specification of a set of documents to listen to. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - query (google.cloud.firestore_v1.types.Target.QueryTarget): - A target specified by a query. - - This field is a member of `oneof`_ ``target_type``. - documents (google.cloud.firestore_v1.types.Target.DocumentsTarget): - A target specified by a set of document - names. - - This field is a member of `oneof`_ ``target_type``. - resume_token (bytes): - A resume token from a prior - [TargetChange][google.firestore.v1.TargetChange] for an - identical target. - - Using a resume token with a different target is unsupported - and may fail. - - This field is a member of `oneof`_ ``resume_type``. - read_time (google.protobuf.timestamp_pb2.Timestamp): - Start listening after a specific ``read_time``. - - The client must know the state of matching documents at this - time. - - This field is a member of `oneof`_ ``resume_type``. - target_id (int): - The target ID that identifies the target on the stream. Must - be a positive number and non-zero. - - If ``target_id`` is 0 (or unspecified), the server will - assign an ID for this target and return that in a - ``TargetChange::ADD`` event. Once a target with - ``target_id=0`` is added, all subsequent targets must also - have ``target_id=0``. If an ``AddTarget`` request with - ``target_id != 0`` is sent to the server after a target with - ``target_id=0`` is added, the server will immediately send a - response with a ``TargetChange::Remove`` event. - - Note that if the client sends multiple ``AddTarget`` - requests without an ID, the order of IDs returned in - ``TargetChage.target_ids`` are undefined. Therefore, clients - should provide a target ID instead of relying on the server - to assign one. - - If ``target_id`` is non-zero, there must not be an existing - active target on this stream with the same ID. - once (bool): - If the target should be removed once it is - current and consistent. 
- expected_count (google.protobuf.wrappers_pb2.Int32Value): - The number of documents that last matched the query at the - resume token or read time. - - This value is only relevant when a ``resume_type`` is - provided. This value being present and greater than zero - signals that the client wants - ``ExistenceFilter.unchanged_names`` to be included in the - response. - """ - - class DocumentsTarget(proto.Message): - r"""A target specified by a set of documents names. - - Attributes: - documents (MutableSequence[str]): - The names of the documents to retrieve. In the format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - The request will fail if any of the document is not a child - resource of the given ``database``. Duplicate names will be - elided. - """ - - documents: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - - class QueryTarget(proto.Message): - r"""A target specified by a query. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - parent (str): - The parent resource name. In the format: - ``projects/{project_id}/databases/{database_id}/documents`` - or - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - For example: - ``projects/my-project/databases/my-database/documents`` or - ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`` - structured_query (google.cloud.firestore_v1.types.StructuredQuery): - A structured query. - - This field is a member of `oneof`_ ``query_type``. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - structured_query: gf_query.StructuredQuery = proto.Field( - proto.MESSAGE, - number=2, - oneof="query_type", - message=gf_query.StructuredQuery, - ) - - query: QueryTarget = proto.Field( - proto.MESSAGE, - number=2, - oneof="target_type", - message=QueryTarget, - ) - documents: DocumentsTarget = proto.Field( - proto.MESSAGE, - number=3, - oneof="target_type", - message=DocumentsTarget, - ) - resume_token: bytes = proto.Field( - proto.BYTES, - number=4, - oneof="resume_type", - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=11, - oneof="resume_type", - message=timestamp_pb2.Timestamp, - ) - target_id: int = proto.Field( - proto.INT32, - number=5, - ) - once: bool = proto.Field( - proto.BOOL, - number=6, - ) - expected_count: wrappers_pb2.Int32Value = proto.Field( - proto.MESSAGE, - number=12, - message=wrappers_pb2.Int32Value, - ) - - -class TargetChange(proto.Message): - r"""Targets being watched have changed. - - Attributes: - target_change_type (google.cloud.firestore_v1.types.TargetChange.TargetChangeType): - The type of change that occurred. - target_ids (MutableSequence[int]): - The target IDs of targets that have changed. - - If empty, the change applies to all targets. - - The order of the target IDs is not defined. - cause (google.rpc.status_pb2.Status): - The error that resulted in this change, if - applicable. - resume_token (bytes): - A token that can be used to resume the stream for the given - ``target_ids``, or all targets if ``target_ids`` is empty. - - Not set on every target change. - read_time (google.protobuf.timestamp_pb2.Timestamp): - The consistent ``read_time`` for the given ``target_ids`` - (omitted when the target_ids are not at a consistent - snapshot). - - The stream is guaranteed to send a ``read_time`` with - ``target_ids`` empty whenever the entire stream reaches a - new consistent snapshot. 
ADD, CURRENT, and RESET messages - are guaranteed to (eventually) result in a new consistent - snapshot (while NO_CHANGE and REMOVE messages are not). - - For a given stream, ``read_time`` is guaranteed to be - monotonically increasing. - """ - - class TargetChangeType(proto.Enum): - r"""The type of change. - - Values: - NO_CHANGE (0): - No change has occurred. Used only to send an updated - ``resume_token``. - ADD (1): - The targets have been added. - REMOVE (2): - The targets have been removed. - CURRENT (3): - The targets reflect all changes committed before the targets - were added to the stream. - - This will be sent after or with a ``read_time`` that is - greater than or equal to the time at which the targets were - added. - - Listeners can wait for this change if read-after-write - semantics are desired. - RESET (4): - The targets have been reset, and a new initial state for the - targets will be returned in subsequent changes. - - After the initial state is complete, ``CURRENT`` will be - returned even if the target was previously indicated to be - ``CURRENT``. - """ - NO_CHANGE = 0 - ADD = 1 - REMOVE = 2 - CURRENT = 3 - RESET = 4 - - target_change_type: TargetChangeType = proto.Field( - proto.ENUM, - number=1, - enum=TargetChangeType, - ) - target_ids: MutableSequence[int] = proto.RepeatedField( - proto.INT32, - number=2, - ) - cause: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=3, - message=status_pb2.Status, - ) - resume_token: bytes = proto.Field( - proto.BYTES, - number=4, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - - -class ListCollectionIdsRequest(proto.Message): - r"""The request for - [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds]. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - parent (str): - Required. The parent document. 
In the format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - For example: - ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`` - page_size (int): - The maximum number of results to return. - page_token (str): - A page token. Must be a value from - [ListCollectionIdsResponse][google.firestore.v1.ListCollectionIdsResponse]. - read_time (google.protobuf.timestamp_pb2.Timestamp): - Reads documents as they were at the given - time. - This must be a microsecond precision timestamp - within the past one hour, or if Point-in-Time - Recovery is enabled, can additionally be a whole - minute timestamp within the past 7 days. - - This field is a member of `oneof`_ ``consistency_selector``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - oneof="consistency_selector", - message=timestamp_pb2.Timestamp, - ) - - -class ListCollectionIdsResponse(proto.Message): - r"""The response from - [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds]. - - Attributes: - collection_ids (MutableSequence[str]): - The collection ids. - next_page_token (str): - A page token that may be used to continue the - list. - """ - - @property - def raw_page(self): - return self - - collection_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class BatchWriteRequest(proto.Message): - r"""The request for - [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite]. - - Attributes: - database (str): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - writes (MutableSequence[google.cloud.firestore_v1.types.Write]): - The writes to apply. 
- - Method does not apply writes atomically and does - not guarantee ordering. Each write succeeds or - fails independently. You cannot write to the - same document more than once per request. - labels (MutableMapping[str, str]): - Labels associated with this batch write. - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - writes: MutableSequence[write.Write] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=write.Write, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=3, - ) - - -class BatchWriteResponse(proto.Message): - r"""The response from - [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite]. - - Attributes: - write_results (MutableSequence[google.cloud.firestore_v1.types.WriteResult]): - The result of applying the writes. - - This i-th write result corresponds to the i-th - write in the request. - status (MutableSequence[google.rpc.status_pb2.Status]): - The status of applying the writes. - - This i-th write status corresponds to the i-th - write in the request. - """ - - write_results: MutableSequence[write.WriteResult] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=write.WriteResult, - ) - status: MutableSequence[status_pb2.Status] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=status_pb2.Status, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_v1/types/query.py b/google/cloud/firestore_v1/types/query.py deleted file mode 100644 index c2856d0b42..0000000000 --- a/google/cloud/firestore_v1/types/query.py +++ /dev/null @@ -1,913 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.firestore_v1.types import document -from google.protobuf import wrappers_pb2 # type: ignore - - -__protobuf__ = proto.module( - package="google.firestore.v1", - manifest={ - "StructuredQuery", - "StructuredAggregationQuery", - "Cursor", - }, -) - - -class StructuredQuery(proto.Message): - r"""A Firestore query. - - The query stages are executed in the following order: - - 1. from - 2. where - 3. select - 4. order_by + start_at + end_at - 5. offset - 6. limit - 7. find_nearest - - Attributes: - select (google.cloud.firestore_v1.types.StructuredQuery.Projection): - Optional sub-set of the fields to return. - - This acts as a - [DocumentMask][google.firestore.v1.DocumentMask] over the - documents returned from a query. When not set, assumes that - the caller wants all fields returned. - from_ (MutableSequence[google.cloud.firestore_v1.types.StructuredQuery.CollectionSelector]): - The collections to query. - where (google.cloud.firestore_v1.types.StructuredQuery.Filter): - The filter to apply. - order_by (MutableSequence[google.cloud.firestore_v1.types.StructuredQuery.Order]): - The order to apply to the query results. - - Firestore allows callers to provide a full ordering, a - partial ordering, or no ordering at all. In all cases, - Firestore guarantees a stable ordering through the following - rules: - - - The ``order_by`` is required to reference all fields used - with an inequality filter. 
- - All fields that are required to be in the ``order_by`` but - are not already present are appended in lexicographical - ordering of the field name. - - If an order on ``__name__`` is not specified, it is - appended by default. - - Fields are appended with the same sort direction as the last - order specified, or 'ASCENDING' if no order was specified. - For example: - - - ``ORDER BY a`` becomes ``ORDER BY a ASC, __name__ ASC`` - - ``ORDER BY a DESC`` becomes - ``ORDER BY a DESC, __name__ DESC`` - - ``WHERE a > 1`` becomes - ``WHERE a > 1 ORDER BY a ASC, __name__ ASC`` - - ``WHERE __name__ > ... AND a > 1`` becomes - ``WHERE __name__ > ... AND a > 1 ORDER BY a ASC, __name__ ASC`` - start_at (google.cloud.firestore_v1.types.Cursor): - A potential prefix of a position in the result set to start - the query at. - - The ordering of the result set is based on the ``ORDER BY`` - clause of the original query. - - :: - - SELECT * FROM k WHERE a = 1 AND b > 2 ORDER BY b ASC, __name__ ASC; - - This query's results are ordered by - ``(b ASC, __name__ ASC)``. - - Cursors can reference either the full ordering or a prefix - of the location, though it cannot reference more fields than - what are in the provided ``ORDER BY``. - - Continuing off the example above, attaching the following - start cursors will have varying impact: - - - ``START BEFORE (2, /k/123)``: start the query right before - ``a = 1 AND b > 2 AND __name__ > /k/123``. - - ``START AFTER (10)``: start the query right after - ``a = 1 AND b > 10``. - - Unlike ``OFFSET`` which requires scanning over the first N - results to skip, a start cursor allows the query to begin at - a logical position. This position is not required to match - an actual result, it will scan forward from this position to - find the next document. - - Requires: - - - The number of values cannot be greater than the number of - fields specified in the ``ORDER BY`` clause. 
- end_at (google.cloud.firestore_v1.types.Cursor): - A potential prefix of a position in the result set to end - the query at. - - This is similar to ``START_AT`` but with it controlling the - end position rather than the start position. - - Requires: - - - The number of values cannot be greater than the number of - fields specified in the ``ORDER BY`` clause. - offset (int): - The number of documents to skip before returning the first - result. - - This applies after the constraints specified by the - ``WHERE``, ``START AT``, & ``END AT`` but before the - ``LIMIT`` clause. - - Requires: - - - The value must be greater than or equal to zero if - specified. - limit (google.protobuf.wrappers_pb2.Int32Value): - The maximum number of results to return. - - Applies after all other constraints. - - Requires: - - - The value must be greater than or equal to zero if - specified. - find_nearest (google.cloud.firestore_v1.types.StructuredQuery.FindNearest): - Optional. A potential nearest neighbors - search. - Applies after all other filters and ordering. - - Finds the closest vector embeddings to the given - query vector. - """ - - class Direction(proto.Enum): - r"""A sort direction. - - Values: - DIRECTION_UNSPECIFIED (0): - Unspecified. - ASCENDING (1): - Ascending. - DESCENDING (2): - Descending. - """ - DIRECTION_UNSPECIFIED = 0 - ASCENDING = 1 - DESCENDING = 2 - - class CollectionSelector(proto.Message): - r"""A selection of a collection, such as ``messages as m1``. - - Attributes: - collection_id (str): - The collection ID. - When set, selects only collections with this ID. - all_descendants (bool): - When false, selects only collections that are immediate - children of the ``parent`` specified in the containing - ``RunQueryRequest``. When true, selects all descendant - collections. 
- """ - - collection_id: str = proto.Field( - proto.STRING, - number=2, - ) - all_descendants: bool = proto.Field( - proto.BOOL, - number=3, - ) - - class Filter(proto.Message): - r"""A filter. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - composite_filter (google.cloud.firestore_v1.types.StructuredQuery.CompositeFilter): - A composite filter. - - This field is a member of `oneof`_ ``filter_type``. - field_filter (google.cloud.firestore_v1.types.StructuredQuery.FieldFilter): - A filter on a document field. - - This field is a member of `oneof`_ ``filter_type``. - unary_filter (google.cloud.firestore_v1.types.StructuredQuery.UnaryFilter): - A filter that takes exactly one argument. - - This field is a member of `oneof`_ ``filter_type``. - """ - - composite_filter: "StructuredQuery.CompositeFilter" = proto.Field( - proto.MESSAGE, - number=1, - oneof="filter_type", - message="StructuredQuery.CompositeFilter", - ) - field_filter: "StructuredQuery.FieldFilter" = proto.Field( - proto.MESSAGE, - number=2, - oneof="filter_type", - message="StructuredQuery.FieldFilter", - ) - unary_filter: "StructuredQuery.UnaryFilter" = proto.Field( - proto.MESSAGE, - number=3, - oneof="filter_type", - message="StructuredQuery.UnaryFilter", - ) - - class CompositeFilter(proto.Message): - r"""A filter that merges multiple other filters using the given - operator. - - Attributes: - op (google.cloud.firestore_v1.types.StructuredQuery.CompositeFilter.Operator): - The operator for combining multiple filters. - filters (MutableSequence[google.cloud.firestore_v1.types.StructuredQuery.Filter]): - The list of filters to combine. - - Requires: - - - At least one filter is present. 
- """ - - class Operator(proto.Enum): - r"""A composite filter operator. - - Values: - OPERATOR_UNSPECIFIED (0): - Unspecified. This value must not be used. - AND (1): - Documents are required to satisfy all of the - combined filters. - OR (2): - Documents are required to satisfy at least - one of the combined filters. - """ - OPERATOR_UNSPECIFIED = 0 - AND = 1 - OR = 2 - - op: "StructuredQuery.CompositeFilter.Operator" = proto.Field( - proto.ENUM, - number=1, - enum="StructuredQuery.CompositeFilter.Operator", - ) - filters: MutableSequence["StructuredQuery.Filter"] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message="StructuredQuery.Filter", - ) - - class FieldFilter(proto.Message): - r"""A filter on a specific field. - - Attributes: - field (google.cloud.firestore_v1.types.StructuredQuery.FieldReference): - The field to filter by. - op (google.cloud.firestore_v1.types.StructuredQuery.FieldFilter.Operator): - The operator to filter by. - value (google.cloud.firestore_v1.types.Value): - The value to compare to. - """ - - class Operator(proto.Enum): - r"""A field filter operator. - - Values: - OPERATOR_UNSPECIFIED (0): - Unspecified. This value must not be used. - LESS_THAN (1): - The given ``field`` is less than the given ``value``. - - Requires: - - - That ``field`` come first in ``order_by``. - LESS_THAN_OR_EQUAL (2): - The given ``field`` is less than or equal to the given - ``value``. - - Requires: - - - That ``field`` come first in ``order_by``. - GREATER_THAN (3): - The given ``field`` is greater than the given ``value``. - - Requires: - - - That ``field`` come first in ``order_by``. - GREATER_THAN_OR_EQUAL (4): - The given ``field`` is greater than or equal to the given - ``value``. - - Requires: - - - That ``field`` come first in ``order_by``. - EQUAL (5): - The given ``field`` is equal to the given ``value``. - NOT_EQUAL (6): - The given ``field`` is not equal to the given ``value``. 
- - Requires: - - - No other ``NOT_EQUAL``, ``NOT_IN``, ``IS_NOT_NULL``, or - ``IS_NOT_NAN``. - - That ``field`` comes first in the ``order_by``. - ARRAY_CONTAINS (7): - The given ``field`` is an array that contains the given - ``value``. - IN (8): - The given ``field`` is equal to at least one value in the - given array. - - Requires: - - - That ``value`` is a non-empty ``ArrayValue``, subject to - disjunction limits. - - No ``NOT_IN`` filters in the same query. - ARRAY_CONTAINS_ANY (9): - The given ``field`` is an array that contains any of the - values in the given array. - - Requires: - - - That ``value`` is a non-empty ``ArrayValue``, subject to - disjunction limits. - - No other ``ARRAY_CONTAINS_ANY`` filters within the same - disjunction. - - No ``NOT_IN`` filters in the same query. - NOT_IN (10): - The value of the ``field`` is not in the given array. - - Requires: - - - That ``value`` is a non-empty ``ArrayValue`` with at most - 10 values. - - No other ``OR``, ``IN``, ``ARRAY_CONTAINS_ANY``, - ``NOT_IN``, ``NOT_EQUAL``, ``IS_NOT_NULL``, or - ``IS_NOT_NAN``. - - That ``field`` comes first in the ``order_by``. - """ - OPERATOR_UNSPECIFIED = 0 - LESS_THAN = 1 - LESS_THAN_OR_EQUAL = 2 - GREATER_THAN = 3 - GREATER_THAN_OR_EQUAL = 4 - EQUAL = 5 - NOT_EQUAL = 6 - ARRAY_CONTAINS = 7 - IN = 8 - ARRAY_CONTAINS_ANY = 9 - NOT_IN = 10 - - field: "StructuredQuery.FieldReference" = proto.Field( - proto.MESSAGE, - number=1, - message="StructuredQuery.FieldReference", - ) - op: "StructuredQuery.FieldFilter.Operator" = proto.Field( - proto.ENUM, - number=2, - enum="StructuredQuery.FieldFilter.Operator", - ) - value: document.Value = proto.Field( - proto.MESSAGE, - number=3, - message=document.Value, - ) - - class UnaryFilter(proto.Message): - r"""A filter with a single operand. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - op (google.cloud.firestore_v1.types.StructuredQuery.UnaryFilter.Operator): - The unary operator to apply. - field (google.cloud.firestore_v1.types.StructuredQuery.FieldReference): - The field to which to apply the operator. - - This field is a member of `oneof`_ ``operand_type``. - """ - - class Operator(proto.Enum): - r"""A unary operator. - - Values: - OPERATOR_UNSPECIFIED (0): - Unspecified. This value must not be used. - IS_NAN (2): - The given ``field`` is equal to ``NaN``. - IS_NULL (3): - The given ``field`` is equal to ``NULL``. - IS_NOT_NAN (4): - The given ``field`` is not equal to ``NaN``. - - Requires: - - - No other ``NOT_EQUAL``, ``NOT_IN``, ``IS_NOT_NULL``, or - ``IS_NOT_NAN``. - - That ``field`` comes first in the ``order_by``. - IS_NOT_NULL (5): - The given ``field`` is not equal to ``NULL``. - - Requires: - - - A single ``NOT_EQUAL``, ``NOT_IN``, ``IS_NOT_NULL``, or - ``IS_NOT_NAN``. - - That ``field`` comes first in the ``order_by``. - """ - OPERATOR_UNSPECIFIED = 0 - IS_NAN = 2 - IS_NULL = 3 - IS_NOT_NAN = 4 - IS_NOT_NULL = 5 - - op: "StructuredQuery.UnaryFilter.Operator" = proto.Field( - proto.ENUM, - number=1, - enum="StructuredQuery.UnaryFilter.Operator", - ) - field: "StructuredQuery.FieldReference" = proto.Field( - proto.MESSAGE, - number=2, - oneof="operand_type", - message="StructuredQuery.FieldReference", - ) - - class Order(proto.Message): - r"""An order on a field. - - Attributes: - field (google.cloud.firestore_v1.types.StructuredQuery.FieldReference): - The field to order by. - direction (google.cloud.firestore_v1.types.StructuredQuery.Direction): - The direction to order by. Defaults to ``ASCENDING``. 
- """ - - field: "StructuredQuery.FieldReference" = proto.Field( - proto.MESSAGE, - number=1, - message="StructuredQuery.FieldReference", - ) - direction: "StructuredQuery.Direction" = proto.Field( - proto.ENUM, - number=2, - enum="StructuredQuery.Direction", - ) - - class FieldReference(proto.Message): - r"""A reference to a field in a document, ex: ``stats.operations``. - - Attributes: - field_path (str): - A reference to a field in a document. - - Requires: - - - MUST be a dot-delimited (``.``) string of segments, where - each segment conforms to [document field - name][google.firestore.v1.Document.fields] limitations. - """ - - field_path: str = proto.Field( - proto.STRING, - number=2, - ) - - class Projection(proto.Message): - r"""The projection of document's fields to return. - - Attributes: - fields (MutableSequence[google.cloud.firestore_v1.types.StructuredQuery.FieldReference]): - The fields to return. - - If empty, all fields are returned. To only return the name - of the document, use ``['__name__']``. - """ - - fields: MutableSequence["StructuredQuery.FieldReference"] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message="StructuredQuery.FieldReference", - ) - - class FindNearest(proto.Message): - r"""Nearest Neighbors search config. The ordering provided by - FindNearest supersedes the order_by stage. If multiple documents - have the same vector distance, the returned document order is not - guaranteed to be stable between queries. - - Attributes: - vector_field (google.cloud.firestore_v1.types.StructuredQuery.FieldReference): - Required. An indexed vector field to search upon. Only - documents which contain vectors whose dimensionality match - the query_vector can be returned. - query_vector (google.cloud.firestore_v1.types.Value): - Required. The query vector that we are - searching on. Must be a vector of no more than - 2048 dimensions. 
- distance_measure (google.cloud.firestore_v1.types.StructuredQuery.FindNearest.DistanceMeasure): - Required. The distance measure to use, - required. - limit (google.protobuf.wrappers_pb2.Int32Value): - Required. The number of nearest neighbors to - return. Must be a positive integer of no more - than 1000. - distance_result_field (str): - Optional. Optional name of the field to output the result of - the vector distance calculation. Must conform to [document - field name][google.firestore.v1.Document.fields] - limitations. - distance_threshold (google.protobuf.wrappers_pb2.DoubleValue): - Optional. Option to specify a threshold for which no less - similar documents will be returned. The behavior of the - specified ``distance_measure`` will affect the meaning of - the distance threshold. Since DOT_PRODUCT distances increase - when the vectors are more similar, the comparison is - inverted. - - - For EUCLIDEAN, COSINE: WHERE distance <= - distance_threshold - - For DOT_PRODUCT: WHERE distance >= distance_threshold - """ - - class DistanceMeasure(proto.Enum): - r"""The distance measure to use when comparing vectors. - - Values: - DISTANCE_MEASURE_UNSPECIFIED (0): - Should not be set. - EUCLIDEAN (1): - Measures the EUCLIDEAN distance between the vectors. See - `Euclidean `__ - to learn more. The resulting distance decreases the more - similar two vectors are. - COSINE (2): - COSINE distance compares vectors based on the angle between - them, which allows you to measure similarity that isn't - based on the vectors magnitude. We recommend using - DOT_PRODUCT with unit normalized vectors instead of COSINE - distance, which is mathematically equivalent with better - performance. See `Cosine - Similarity `__ - to learn more about COSINE similarity and COSINE distance. - The resulting COSINE distance decreases the more similar two - vectors are. - DOT_PRODUCT (3): - Similar to cosine but is affected by the magnitude of the - vectors. 
See `Dot - Product `__ to - learn more. The resulting distance increases the more - similar two vectors are. - """ - DISTANCE_MEASURE_UNSPECIFIED = 0 - EUCLIDEAN = 1 - COSINE = 2 - DOT_PRODUCT = 3 - - vector_field: "StructuredQuery.FieldReference" = proto.Field( - proto.MESSAGE, - number=1, - message="StructuredQuery.FieldReference", - ) - query_vector: document.Value = proto.Field( - proto.MESSAGE, - number=2, - message=document.Value, - ) - distance_measure: "StructuredQuery.FindNearest.DistanceMeasure" = proto.Field( - proto.ENUM, - number=3, - enum="StructuredQuery.FindNearest.DistanceMeasure", - ) - limit: wrappers_pb2.Int32Value = proto.Field( - proto.MESSAGE, - number=4, - message=wrappers_pb2.Int32Value, - ) - distance_result_field: str = proto.Field( - proto.STRING, - number=5, - ) - distance_threshold: wrappers_pb2.DoubleValue = proto.Field( - proto.MESSAGE, - number=6, - message=wrappers_pb2.DoubleValue, - ) - - select: Projection = proto.Field( - proto.MESSAGE, - number=1, - message=Projection, - ) - from_: MutableSequence[CollectionSelector] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=CollectionSelector, - ) - where: Filter = proto.Field( - proto.MESSAGE, - number=3, - message=Filter, - ) - order_by: MutableSequence[Order] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=Order, - ) - start_at: "Cursor" = proto.Field( - proto.MESSAGE, - number=7, - message="Cursor", - ) - end_at: "Cursor" = proto.Field( - proto.MESSAGE, - number=8, - message="Cursor", - ) - offset: int = proto.Field( - proto.INT32, - number=6, - ) - limit: wrappers_pb2.Int32Value = proto.Field( - proto.MESSAGE, - number=5, - message=wrappers_pb2.Int32Value, - ) - find_nearest: FindNearest = proto.Field( - proto.MESSAGE, - number=9, - message=FindNearest, - ) - - -class StructuredAggregationQuery(proto.Message): - r"""Firestore query for running an aggregation over a - [StructuredQuery][google.firestore.v1.StructuredQuery]. - - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - structured_query (google.cloud.firestore_v1.types.StructuredQuery): - Nested structured query. - - This field is a member of `oneof`_ ``query_type``. - aggregations (MutableSequence[google.cloud.firestore_v1.types.StructuredAggregationQuery.Aggregation]): - Optional. Series of aggregations to apply over the results - of the ``structured_query``. - - Requires: - - - A minimum of one and maximum of five aggregations per - query. - """ - - class Aggregation(proto.Message): - r"""Defines an aggregation that produces a single result. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - count (google.cloud.firestore_v1.types.StructuredAggregationQuery.Aggregation.Count): - Count aggregator. - - This field is a member of `oneof`_ ``operator``. - sum (google.cloud.firestore_v1.types.StructuredAggregationQuery.Aggregation.Sum): - Sum aggregator. - - This field is a member of `oneof`_ ``operator``. - avg (google.cloud.firestore_v1.types.StructuredAggregationQuery.Aggregation.Avg): - Average aggregator. - - This field is a member of `oneof`_ ``operator``. - alias (str): - Optional. Optional name of the field to store the result of - the aggregation into. - - If not provided, Firestore will pick a default name - following the format ``field_``. For - example: - - :: - - AGGREGATE - COUNT_UP_TO(1) AS count_up_to_1, - COUNT_UP_TO(2), - COUNT_UP_TO(3) AS count_up_to_3, - COUNT(*) - OVER ( - ... - ); - - becomes: - - :: - - AGGREGATE - COUNT_UP_TO(1) AS count_up_to_1, - COUNT_UP_TO(2) AS field_1, - COUNT_UP_TO(3) AS count_up_to_3, - COUNT(*) AS field_2 - OVER ( - ... 
- ); - - Requires: - - - Must be unique across all aggregation aliases. - - Conform to [document field - name][google.firestore.v1.Document.fields] limitations. - """ - - class Count(proto.Message): - r"""Count of documents that match the query. - - The ``COUNT(*)`` aggregation function operates on the entire - document so it does not require a field reference. - - Attributes: - up_to (google.protobuf.wrappers_pb2.Int64Value): - Optional. Optional constraint on the maximum number of - documents to count. - - This provides a way to set an upper bound on the number of - documents to scan, limiting latency, and cost. - - Unspecified is interpreted as no bound. - - High-Level Example: - - :: - - AGGREGATE COUNT_UP_TO(1000) OVER ( SELECT * FROM k ); - - Requires: - - - Must be greater than zero when present. - """ - - up_to: wrappers_pb2.Int64Value = proto.Field( - proto.MESSAGE, - number=1, - message=wrappers_pb2.Int64Value, - ) - - class Sum(proto.Message): - r"""Sum of the values of the requested field. - - - Only numeric values will be aggregated. All non-numeric values - including ``NULL`` are skipped. - - - If the aggregated values contain ``NaN``, returns ``NaN``. - Infinity math follows IEEE-754 standards. - - - If the aggregated value set is empty, returns 0. - - - Returns a 64-bit integer if all aggregated numbers are integers - and the sum result does not overflow. Otherwise, the result is - returned as a double. Note that even if all the aggregated values - are integers, the result is returned as a double if it cannot fit - within a 64-bit signed integer. When this occurs, the returned - value will lose precision. - - - When underflow occurs, floating-point aggregation is - non-deterministic. This means that running the same query - repeatedly without any changes to the underlying values could - produce slightly different results each time. In those cases, - values should be stored as integers over floating-point numbers. 
- - Attributes: - field (google.cloud.firestore_v1.types.StructuredQuery.FieldReference): - The field to aggregate on. - """ - - field: "StructuredQuery.FieldReference" = proto.Field( - proto.MESSAGE, - number=1, - message="StructuredQuery.FieldReference", - ) - - class Avg(proto.Message): - r"""Average of the values of the requested field. - - - Only numeric values will be aggregated. All non-numeric values - including ``NULL`` are skipped. - - - If the aggregated values contain ``NaN``, returns ``NaN``. - Infinity math follows IEEE-754 standards. - - - If the aggregated value set is empty, returns ``NULL``. - - - Always returns the result as a double. - - Attributes: - field (google.cloud.firestore_v1.types.StructuredQuery.FieldReference): - The field to aggregate on. - """ - - field: "StructuredQuery.FieldReference" = proto.Field( - proto.MESSAGE, - number=1, - message="StructuredQuery.FieldReference", - ) - - count: "StructuredAggregationQuery.Aggregation.Count" = proto.Field( - proto.MESSAGE, - number=1, - oneof="operator", - message="StructuredAggregationQuery.Aggregation.Count", - ) - sum: "StructuredAggregationQuery.Aggregation.Sum" = proto.Field( - proto.MESSAGE, - number=2, - oneof="operator", - message="StructuredAggregationQuery.Aggregation.Sum", - ) - avg: "StructuredAggregationQuery.Aggregation.Avg" = proto.Field( - proto.MESSAGE, - number=3, - oneof="operator", - message="StructuredAggregationQuery.Aggregation.Avg", - ) - alias: str = proto.Field( - proto.STRING, - number=7, - ) - - structured_query: "StructuredQuery" = proto.Field( - proto.MESSAGE, - number=1, - oneof="query_type", - message="StructuredQuery", - ) - aggregations: MutableSequence[Aggregation] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=Aggregation, - ) - - -class Cursor(proto.Message): - r"""A position in a query result set. 
- - Attributes: - values (MutableSequence[google.cloud.firestore_v1.types.Value]): - The values that represent a position, in the - order they appear in the order by clause of a - query. - - Can contain fewer values than specified in the - order by clause. - before (bool): - If the position is just before or just after - the given values, relative to the sort order - defined by the query. - """ - - values: MutableSequence[document.Value] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=document.Value, - ) - before: bool = proto.Field( - proto.BOOL, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_v1/types/query_profile.py b/google/cloud/firestore_v1/types/query_profile.py deleted file mode 100644 index f93184ae39..0000000000 --- a/google/cloud/firestore_v1/types/query_profile.py +++ /dev/null @@ -1,144 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore - - -__protobuf__ = proto.module( - package="google.firestore.v1", - manifest={ - "ExplainOptions", - "ExplainMetrics", - "PlanSummary", - "ExecutionStats", - }, -) - - -class ExplainOptions(proto.Message): - r"""Explain options for the query. 
- - Attributes: - analyze (bool): - Optional. Whether to execute this query. - - When false (the default), the query will be - planned, returning only metrics from the - planning stages. - - When true, the query will be planned and - executed, returning the full query results along - with both planning and execution stage metrics. - """ - - analyze: bool = proto.Field( - proto.BOOL, - number=1, - ) - - -class ExplainMetrics(proto.Message): - r"""Explain metrics for the query. - - Attributes: - plan_summary (google.cloud.firestore_v1.types.PlanSummary): - Planning phase information for the query. - execution_stats (google.cloud.firestore_v1.types.ExecutionStats): - Aggregated stats from the execution of the query. Only - present when - [ExplainOptions.analyze][google.firestore.v1.ExplainOptions.analyze] - is set to true. - """ - - plan_summary: "PlanSummary" = proto.Field( - proto.MESSAGE, - number=1, - message="PlanSummary", - ) - execution_stats: "ExecutionStats" = proto.Field( - proto.MESSAGE, - number=2, - message="ExecutionStats", - ) - - -class PlanSummary(proto.Message): - r"""Planning phase information for the query. - - Attributes: - indexes_used (MutableSequence[google.protobuf.struct_pb2.Struct]): - The indexes selected for the query. For example: [ - {"query_scope": "Collection", "properties": "(foo ASC, - **name** ASC)"}, {"query_scope": "Collection", "properties": - "(bar ASC, **name** ASC)"} ] - """ - - indexes_used: MutableSequence[struct_pb2.Struct] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=struct_pb2.Struct, - ) - - -class ExecutionStats(proto.Message): - r"""Execution statistics for the query. - - Attributes: - results_returned (int): - Total number of results returned, including - documents, projections, aggregation results, - keys. - execution_duration (google.protobuf.duration_pb2.Duration): - Total time to execute the query in the - backend. - read_operations (int): - Total billable read operations. 
- debug_stats (google.protobuf.struct_pb2.Struct): - Debugging statistics from the execution of the query. Note - that the debugging stats are subject to change as Firestore - evolves. It could include: { "indexes_entries_scanned": - "1000", "documents_scanned": "20", "billing_details" : { - "documents_billable": "20", "index_entries_billable": - "1000", "min_query_cost": "0" } } - """ - - results_returned: int = proto.Field( - proto.INT64, - number=1, - ) - execution_duration: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=3, - message=duration_pb2.Duration, - ) - read_operations: int = proto.Field( - proto.INT64, - number=4, - ) - debug_stats: struct_pb2.Struct = proto.Field( - proto.MESSAGE, - number=5, - message=struct_pb2.Struct, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_v1/types/write.py b/google/cloud/firestore_v1/types/write.py deleted file mode 100644 index e393b91480..0000000000 --- a/google/cloud/firestore_v1/types/write.py +++ /dev/null @@ -1,514 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.firestore_v1.types import bloom_filter -from google.cloud.firestore_v1.types import common -from google.cloud.firestore_v1.types import document as gf_document -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package="google.firestore.v1", - manifest={ - "Write", - "DocumentTransform", - "WriteResult", - "DocumentChange", - "DocumentDelete", - "DocumentRemove", - "ExistenceFilter", - }, -) - - -class Write(proto.Message): - r"""A write on a document. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - update (google.cloud.firestore_v1.types.Document): - A document to write. - - This field is a member of `oneof`_ ``operation``. - delete (str): - A document name to delete. In the format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - - This field is a member of `oneof`_ ``operation``. - transform (google.cloud.firestore_v1.types.DocumentTransform): - Applies a transformation to a document. - - This field is a member of `oneof`_ ``operation``. - update_mask (google.cloud.firestore_v1.types.DocumentMask): - The fields to update in this write. - - This field can be set only when the operation is ``update``. - If the mask is not set for an ``update`` and the document - exists, any existing data will be overwritten. If the mask - is set and the document on the server has fields not covered - by the mask, they are left unchanged. Fields referenced in - the mask, but not present in the input document, are deleted - from the document on the server. 
The field paths in this - mask must not contain a reserved field name. - update_transforms (MutableSequence[google.cloud.firestore_v1.types.DocumentTransform.FieldTransform]): - The transforms to perform after update. - - This field can be set only when the operation is ``update``. - If present, this write is equivalent to performing - ``update`` and ``transform`` to the same document atomically - and in order. - current_document (google.cloud.firestore_v1.types.Precondition): - An optional precondition on the document. - - The write will fail if this is set and not met - by the target document. - """ - - update: gf_document.Document = proto.Field( - proto.MESSAGE, - number=1, - oneof="operation", - message=gf_document.Document, - ) - delete: str = proto.Field( - proto.STRING, - number=2, - oneof="operation", - ) - transform: "DocumentTransform" = proto.Field( - proto.MESSAGE, - number=6, - oneof="operation", - message="DocumentTransform", - ) - update_mask: common.DocumentMask = proto.Field( - proto.MESSAGE, - number=3, - message=common.DocumentMask, - ) - update_transforms: MutableSequence[ - "DocumentTransform.FieldTransform" - ] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message="DocumentTransform.FieldTransform", - ) - current_document: common.Precondition = proto.Field( - proto.MESSAGE, - number=4, - message=common.Precondition, - ) - - -class DocumentTransform(proto.Message): - r"""A transformation of a document. - - Attributes: - document (str): - The name of the document to transform. - field_transforms (MutableSequence[google.cloud.firestore_v1.types.DocumentTransform.FieldTransform]): - The list of transformations to apply to the - fields of the document, in order. - This must not be empty. - """ - - class FieldTransform(proto.Message): - r"""A transformation of a field of the document. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. 
- Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - field_path (str): - The path of the field. See - [Document.fields][google.firestore.v1.Document.fields] for - the field path syntax reference. - set_to_server_value (google.cloud.firestore_v1.types.DocumentTransform.FieldTransform.ServerValue): - Sets the field to the given server value. - - This field is a member of `oneof`_ ``transform_type``. - increment (google.cloud.firestore_v1.types.Value): - Adds the given value to the field's current - value. - This must be an integer or a double value. - If the field is not an integer or double, or if - the field does not yet exist, the transformation - will set the field to the given value. If either - of the given value or the current field value - are doubles, both values will be interpreted as - doubles. Double arithmetic and representation of - double values follow IEEE 754 semantics. If - there is positive/negative integer overflow, the - field is resolved to the largest magnitude - positive/negative integer. - - This field is a member of `oneof`_ ``transform_type``. - maximum (google.cloud.firestore_v1.types.Value): - Sets the field to the maximum of its current - value and the given value. - This must be an integer or a double value. - If the field is not an integer or double, or if - the field does not yet exist, the transformation - will set the field to the given value. If a - maximum operation is applied where the field and - the input value are of mixed types (that is - - one is an integer and one is a double) the field - takes on the type of the larger operand. If the - operands are equivalent (e.g. 3 and 3.0), the - field does not change. 0, 0.0, and -0.0 are all - zero. The maximum of a zero stored value and - zero input value is always the stored value. 
- The maximum of any numeric value x and NaN is - NaN. - - This field is a member of `oneof`_ ``transform_type``. - minimum (google.cloud.firestore_v1.types.Value): - Sets the field to the minimum of its current - value and the given value. - This must be an integer or a double value. - If the field is not an integer or double, or if - the field does not yet exist, the transformation - will set the field to the input value. If a - minimum operation is applied where the field and - the input value are of mixed types (that is - - one is an integer and one is a double) the field - takes on the type of the smaller operand. If the - operands are equivalent (e.g. 3 and 3.0), the - field does not change. 0, 0.0, and -0.0 are all - zero. The minimum of a zero stored value and - zero input value is always the stored value. - The minimum of any numeric value x and NaN is - NaN. - - This field is a member of `oneof`_ ``transform_type``. - append_missing_elements (google.cloud.firestore_v1.types.ArrayValue): - Append the given elements in order if they are not already - present in the current field value. If the field is not an - array, or if the field does not yet exist, it is first set - to the empty array. - - Equivalent numbers of different types (e.g. 3L and 3.0) are - considered equal when checking if a value is missing. NaN is - equal to NaN, and Null is equal to Null. If the input - contains multiple equivalent values, only the first will be - considered. - - The corresponding transform_result will be the null value. - - This field is a member of `oneof`_ ``transform_type``. - remove_all_from_array (google.cloud.firestore_v1.types.ArrayValue): - Remove all of the given elements from the array in the - field. If the field is not an array, or if the field does - not yet exist, it is set to the empty array. - - Equivalent numbers of the different types (e.g. 3L and 3.0) - are considered equal when deciding whether an element should - be removed. 
NaN is equal to NaN, and Null is equal to Null. - This will remove all equivalent values if there are - duplicates. - - The corresponding transform_result will be the null value. - - This field is a member of `oneof`_ ``transform_type``. - """ - - class ServerValue(proto.Enum): - r"""A value that is calculated by the server. - - Values: - SERVER_VALUE_UNSPECIFIED (0): - Unspecified. This value must not be used. - REQUEST_TIME (1): - The time at which the server processed the - request, with millisecond precision. If used on - multiple fields (same or different documents) in - a transaction, all the fields will get the same - server timestamp. - """ - SERVER_VALUE_UNSPECIFIED = 0 - REQUEST_TIME = 1 - - field_path: str = proto.Field( - proto.STRING, - number=1, - ) - set_to_server_value: "DocumentTransform.FieldTransform.ServerValue" = ( - proto.Field( - proto.ENUM, - number=2, - oneof="transform_type", - enum="DocumentTransform.FieldTransform.ServerValue", - ) - ) - increment: gf_document.Value = proto.Field( - proto.MESSAGE, - number=3, - oneof="transform_type", - message=gf_document.Value, - ) - maximum: gf_document.Value = proto.Field( - proto.MESSAGE, - number=4, - oneof="transform_type", - message=gf_document.Value, - ) - minimum: gf_document.Value = proto.Field( - proto.MESSAGE, - number=5, - oneof="transform_type", - message=gf_document.Value, - ) - append_missing_elements: gf_document.ArrayValue = proto.Field( - proto.MESSAGE, - number=6, - oneof="transform_type", - message=gf_document.ArrayValue, - ) - remove_all_from_array: gf_document.ArrayValue = proto.Field( - proto.MESSAGE, - number=7, - oneof="transform_type", - message=gf_document.ArrayValue, - ) - - document: str = proto.Field( - proto.STRING, - number=1, - ) - field_transforms: MutableSequence[FieldTransform] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=FieldTransform, - ) - - -class WriteResult(proto.Message): - r"""The result of applying a write. 
- - Attributes: - update_time (google.protobuf.timestamp_pb2.Timestamp): - The last update time of the document after applying the - write. Not set after a ``delete``. - - If the write did not actually change the document, this will - be the previous update_time. - transform_results (MutableSequence[google.cloud.firestore_v1.types.Value]): - The results of applying each - [DocumentTransform.FieldTransform][google.firestore.v1.DocumentTransform.FieldTransform], - in the same order. - """ - - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - transform_results: MutableSequence[gf_document.Value] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=gf_document.Value, - ) - - -class DocumentChange(proto.Message): - r"""A [Document][google.firestore.v1.Document] has changed. - - May be the result of multiple [writes][google.firestore.v1.Write], - including deletes, that ultimately resulted in a new value for the - [Document][google.firestore.v1.Document]. - - Multiple [DocumentChange][google.firestore.v1.DocumentChange] - messages may be returned for the same logical change, if multiple - targets are affected. - - Attributes: - document (google.cloud.firestore_v1.types.Document): - The new state of the - [Document][google.firestore.v1.Document]. - - If ``mask`` is set, contains only fields that were updated - or added. - target_ids (MutableSequence[int]): - A set of target IDs of targets that match - this document. - removed_target_ids (MutableSequence[int]): - A set of target IDs for targets that no - longer match this document. 
- """ - - document: gf_document.Document = proto.Field( - proto.MESSAGE, - number=1, - message=gf_document.Document, - ) - target_ids: MutableSequence[int] = proto.RepeatedField( - proto.INT32, - number=5, - ) - removed_target_ids: MutableSequence[int] = proto.RepeatedField( - proto.INT32, - number=6, - ) - - -class DocumentDelete(proto.Message): - r"""A [Document][google.firestore.v1.Document] has been deleted. - - May be the result of multiple [writes][google.firestore.v1.Write], - including updates, the last of which deleted the - [Document][google.firestore.v1.Document]. - - Multiple [DocumentDelete][google.firestore.v1.DocumentDelete] - messages may be returned for the same logical delete, if multiple - targets are affected. - - Attributes: - document (str): - The resource name of the - [Document][google.firestore.v1.Document] that was deleted. - removed_target_ids (MutableSequence[int]): - A set of target IDs for targets that - previously matched this entity. - read_time (google.protobuf.timestamp_pb2.Timestamp): - The read timestamp at which the delete was observed. - - Greater or equal to the ``commit_time`` of the delete. - """ - - document: str = proto.Field( - proto.STRING, - number=1, - ) - removed_target_ids: MutableSequence[int] = proto.RepeatedField( - proto.INT32, - number=6, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - - -class DocumentRemove(proto.Message): - r"""A [Document][google.firestore.v1.Document] has been removed from the - view of the targets. - - Sent if the document is no longer relevant to a target and is out of - view. Can be sent instead of a DocumentDelete or a DocumentChange if - the server can not send the new value of the document. - - Multiple [DocumentRemove][google.firestore.v1.DocumentRemove] - messages may be returned for the same logical write or delete, if - multiple targets are affected. 
- - Attributes: - document (str): - The resource name of the - [Document][google.firestore.v1.Document] that has gone out - of view. - removed_target_ids (MutableSequence[int]): - A set of target IDs for targets that - previously matched this document. - read_time (google.protobuf.timestamp_pb2.Timestamp): - The read timestamp at which the remove was observed. - - Greater or equal to the ``commit_time`` of the - change/delete/remove. - """ - - document: str = proto.Field( - proto.STRING, - number=1, - ) - removed_target_ids: MutableSequence[int] = proto.RepeatedField( - proto.INT32, - number=2, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - - -class ExistenceFilter(proto.Message): - r"""A digest of all the documents that match a given target. - - Attributes: - target_id (int): - The target ID to which this filter applies. - count (int): - The total count of documents that match - [target_id][google.firestore.v1.ExistenceFilter.target_id]. - - If different from the count of documents in the client that - match, the client must manually determine which documents no - longer match the target. - - The client can use the ``unchanged_names`` bloom filter to - assist with this determination by testing ALL the document - names against the filter; if the document name is NOT in the - filter, it means the document no longer matches the target. - unchanged_names (google.cloud.firestore_v1.types.BloomFilter): - A bloom filter that, despite its name, contains the UTF-8 - byte encodings of the resource names of ALL the documents - that match - [target_id][google.firestore.v1.ExistenceFilter.target_id], - in the form - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - - This bloom filter may be omitted at the server's discretion, - such as if it is deemed that the client will not make use of - it or if it is too computationally expensive to calculate or - transmit. 
Clients must gracefully handle this field being - absent by falling back to the logic used before this field - existed; that is, re-add the target without a resume token - to figure out which documents in the client's cache are out - of sync. - """ - - target_id: int = proto.Field( - proto.INT32, - number=1, - ) - count: int = proto.Field( - proto.INT32, - number=2, - ) - unchanged_names: bloom_filter.BloomFilter = proto.Field( - proto.MESSAGE, - number=3, - message=bloom_filter.BloomFilter, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/noxfile.py b/noxfile.py deleted file mode 100644 index 4fb209cbc4..0000000000 --- a/noxfile.py +++ /dev/null @@ -1,584 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! 
- -from __future__ import absolute_import - -import os -import pathlib -import re -import shutil -from typing import Dict, List -import warnings - -import nox - -FLAKE8_VERSION = "flake8==6.1.0" -PYTYPE_VERSION = "pytype==2020.7.24" -BLACK_VERSION = "black[jupyter]==23.7.0" -ISORT_VERSION = "isort==5.11.0" -LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] - -DEFAULT_PYTHON_VERSION = "3.14" - -UNIT_TEST_PYTHON_VERSIONS: List[str] = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", - "3.14", -] -UNIT_TEST_STANDARD_DEPENDENCIES = [ - "mock", - "asyncmock", - "pytest", - "pytest-cov", - "pytest-asyncio==0.21.2", -] -UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [ - "aiounittest", - "six", - "freezegun", -] -UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] -UNIT_TEST_DEPENDENCIES: List[str] = [] -UNIT_TEST_EXTRAS: List[str] = [] -UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} - -SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.14"] -SYSTEM_TEST_STANDARD_DEPENDENCIES: List[str] = [ - "mock", - "pytest", - "google-cloud-testutils", -] -SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [ - "pytest-asyncio==0.21.2", - "six", -] -SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] -SYSTEM_TEST_DEPENDENCIES: List[str] = [] -SYSTEM_TEST_EXTRAS: List[str] = [] -SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -nox.options.sessions = [ - "unit-3.9", - "unit-3.10", - "unit-3.11", - "unit-3.12", - "unit-3.13", - "unit-3.14", - "system_emulated", - "system", - "mypy", - "cover", - "lint", - "lint_setup_py", - "blacken", - "docs", - "docfx", - "format", -] - -# Error if a python version is missing -nox.options.error_on_missing_interpreters = True - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
- """ - session.install(FLAKE8_VERSION, BLACK_VERSION) - session.run( - "black", - "--check", - *LINT_PATHS, - ) - session.run("flake8", "google", "tests") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *LINT_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def format(session): - """ - Run isort to sort imports. Then run black - to format code to uniform standard. - """ - session.install(BLACK_VERSION, ISORT_VERSION) - # Use the --fss option to sort imports using strict alphabetical order. - # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections - session.run( - "isort", - "--fss", - *LINT_PATHS, - ) - session.run( - "black", - *LINT_PATHS, - ) - - -@nox.session(python="3.7") -def pytype(session): - """Verify type hints are pytype compatible.""" - session.install(PYTYPE_VERSION) - session.run( - "pytype", - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def mypy(session): - """Verify type hints are mypy compatible.""" - session.install("-e", ".") - session.install("mypy", "types-setuptools", "types-protobuf") - session.run( - "mypy", - "-p", - "google.cloud.firestore_v1", - "--no-incremental", - "--check-untyped-defs", - "--exclude", - "services", - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("setuptools", "docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") - - -def install_unittest_dependencies(session, *constraints): - standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES - session.install(*standard_deps, *constraints) - - if UNIT_TEST_EXTERNAL_DEPENDENCIES: - warnings.warn( - "'unit_test_external_dependencies' is deprecated. 
Instead, please " - "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", - DeprecationWarning, - ) - session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) - - if UNIT_TEST_LOCAL_DEPENDENCIES: - session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) - - if UNIT_TEST_EXTRAS_BY_PYTHON: - extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) - elif UNIT_TEST_EXTRAS: - extras = UNIT_TEST_EXTRAS - else: - extras = [] - - if extras: - session.install("-e", f".[{','.join(extras)}]", *constraints) - else: - session.install("-e", ".", *constraints) - - -@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) -@nox.parametrize( - "protobuf_implementation", - ["python", "upb", "cpp"], -) -def unit(session, protobuf_implementation): - # Install all test dependencies, then install this package in-place. - - py_version = tuple([int(v) for v in session.python.split(".")]) - if protobuf_implementation == "cpp" and py_version >= (3, 11): - session.skip("cpp implementation is not supported in python 3.11+") - - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" - ) - install_unittest_dependencies(session, "-c", constraints_path) - - # TODO(https://github.com/googleapis/synthtool/issues/1976): - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. - if protobuf_implementation == "cpp": - session.install("protobuf<4") - - # Run py.test against the unit tests. - session.run( - "py.test", - "--quiet", - f"--junitxml=unit_{session.python}_sponge_log.xml", - "--cov=google", - "--cov=tests/unit", - "--cov-append", - "--cov-config=.coveragerc", - "--cov-report=", - "--cov-fail-under=0", - os.path.join("tests", "unit"), - *session.posargs, - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -def install_systemtest_dependencies(session, *constraints): - # Use pre-release gRPC for system tests. 
- # Exclude version 1.52.0rc1 which has a known issue. - # See https://github.com/grpc/grpc/issues/32163 - session.install("--pre", "grpcio!=1.52.0rc1") - - session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: - session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_LOCAL_DEPENDENCIES: - session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_DEPENDENCIES: - session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_EXTRAS_BY_PYTHON: - extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) - elif SYSTEM_TEST_EXTRAS: - extras = SYSTEM_TEST_EXTRAS - else: - extras = [] - - if extras: - session.install("-e", f".[{','.join(extras)}]", *constraints) - else: - session.install("-e", ".", *constraints) - - -@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) -def system_emulated(session): - import subprocess - import signal - - try: - # https://github.com/googleapis/python-firestore/issues/472 - # Kokoro image doesn't have java installed, don't attempt to run emulator. - subprocess.call(["java", "--version"]) - except OSError: - session.skip("java not found but required for emulator support") - - try: - subprocess.call(["gcloud", "--version"]) - except OSError: - session.skip("gcloud not found but required for emulator support") - - # Currently, CI/CD doesn't have beta component of gcloud. 
- subprocess.call( - [ - "gcloud", - "components", - "install", - "beta", - "cloud-firestore-emulator", - ] - ) - - hostport = "localhost:8789" - session.env["FIRESTORE_EMULATOR_HOST"] = hostport - - p = subprocess.Popen( - [ - "gcloud", - "--quiet", - "beta", - "emulators", - "firestore", - "start", - "--host-port", - hostport, - ] - ) - - try: - system(session) - finally: - # Stop Emulator - os.killpg(os.getpgid(p.pid), signal.SIGKILL) - - -@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) -def system(session): - """Run the system test suite.""" - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" - ) - system_test_path = os.path.join("tests", "system.py") - system_test_folder_path = os.path.join("tests", "system") - - # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. - if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": - session.skip("RUN_SYSTEM_TESTS is set to false, skipping") - # Install pyopenssl for mTLS testing. - if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": - session.install("pyopenssl") - - system_test_exists = os.path.exists(system_test_path) - system_test_folder_exists = os.path.exists(system_test_folder_path) - # Sanity check: only run tests if found. - if not system_test_exists and not system_test_folder_exists: - session.skip("System tests were not found") - - install_systemtest_dependencies(session, "-c", constraints_path) - - # Run py.test against the system tests. - if system_test_exists: - session.run( - "py.test", - "--verbose", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_path, - *session.posargs, - ) - if system_test_folder_exists: - session.run( - "py.test", - "--verbose", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_folder_path, - *session.posargs, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. 
- - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run( - "coverage", - "report", - "--show-missing", - "--fail-under=100", - "--omit=tests/*", - ) - - session.run("coverage", "erase") - - -@nox.session(python="3.10") -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install( - # We need to pin to specific versions of the `sphinxcontrib-*` packages - # which still support sphinx 4.x. - # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 - # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. - "sphinxcontrib-applehelp==1.0.4", - "sphinxcontrib-devhelp==1.0.2", - "sphinxcontrib-htmlhelp==2.0.1", - "sphinxcontrib-qthelp==1.0.3", - "sphinxcontrib-serializinghtml==1.1.5", - "sphinx==4.5.0", - "alabaster", - "recommonmark", - ) - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python="3.10") -def docfx(session): - """Build the docfx yaml files for this library.""" - - session.install("-e", ".") - session.install( - # We need to pin to specific versions of the `sphinxcontrib-*` packages - # which still support sphinx 4.x. - # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 - # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
- "sphinxcontrib-applehelp==1.0.4", - "sphinxcontrib-devhelp==1.0.2", - "sphinxcontrib-htmlhelp==2.0.1", - "sphinxcontrib-qthelp==1.0.3", - "sphinxcontrib-serializinghtml==1.1.5", - "gcp-sphinx-docfx-yaml", - "alabaster", - "recommonmark", - ) - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-T", # show full traceback on exception - "-N", # no colors - "-D", - ( - "extensions=sphinx.ext.autodoc," - "sphinx.ext.autosummary," - "docfx_yaml.extension," - "sphinx.ext.intersphinx," - "sphinx.ext.coverage," - "sphinx.ext.napoleon," - "sphinx.ext.todo," - "sphinx.ext.viewcode," - "recommonmark" - ), - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -@nox.parametrize( - "protobuf_implementation", - ["python", "upb", "cpp"], -) -def prerelease_deps(session, protobuf_implementation): - """Run all tests with prerelease versions of dependencies installed.""" - - py_version = tuple([int(v) for v in session.python.split(".")]) - if protobuf_implementation == "cpp" and py_version >= (3, 11): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install all dependencies - session.install("-e", ".[all, tests, tracing]") - unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES - session.install(*unit_deps_all) - system_deps_all = ( - SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES - ) - session.install(*system_deps_all) - - # Because we test minimum dependency versions on the minimum Python - # version, the first version we test with in the unit tests sessions has a - # constraints file containing all dependencies and extras. 
- with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. - constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "protobuf", - # dependency of grpc - "six", - "grpc-google-iam-v1", - "googleapis-common-protos", - "grpcio", - "grpcio-status", - "google-api-core", - "google-auth", - "proto-plus", - "google-cloud-testutils", - # dependencies of google-cloud-testutils" - "click", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - - session.run( - "py.test", - "tests/unit", - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - system_test_path = os.path.join("tests", "system.py") - system_test_folder_path = os.path.join("tests", "system") - - # Only run system tests if found. 
- if os.path.exists(system_test_path): - session.run( - "py.test", - "--verbose", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_path, - *session.posargs, - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - if os.path.exists(system_test_folder_path): - session.run( - "py.test", - "--verbose", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_folder_path, - *session.posargs, - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) diff --git a/owlbot.py b/owlbot.py deleted file mode 100644 index ec92a93451..0000000000 --- a/owlbot.py +++ /dev/null @@ -1,118 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""This script is used to synthesize generated parts of this library.""" -from pathlib import Path -from typing import List, Optional - -import synthtool as s -from synthtool import gcp -from synthtool.languages import python - -common = gcp.CommonTemplates() - -# This library ships clients for 3 different APIs, -# firestore, firestore_admin and firestore_bundle. 
-# firestore_bundle is not versioned -firestore_default_version = "v1" -firestore_admin_default_version = "v1" - -def update_fixup_scripts(path): - # Add message for missing 'libcst' dependency - s.replace( - library / "scripts" / path, - """import libcst as cst""", - """try: - import libcst as cst -except ImportError: - raise ImportError('Run `python -m pip install "libcst >= 0.2.5"` to install libcst.') - - - """, - ) - -for library in s.get_staging_dirs(default_version=firestore_default_version): - s.move(library / f"google/cloud/firestore_{library.name}", excludes=[f"__init__.py", "noxfile.py"]) - s.move(library / f"tests/", f"tests") - fixup_script_path = "fixup_firestore_v1_keywords.py" - update_fixup_scripts(fixup_script_path) - s.move(library / "scripts" / fixup_script_path) - -for library in s.get_staging_dirs(default_version=firestore_admin_default_version): - s.move(library / f"google/cloud/firestore_admin_{library.name}", excludes=[f"__init__.py", "noxfile.py"]) - s.move(library / f"tests", f"tests") - fixup_script_path = "fixup_firestore_admin_v1_keywords.py" - update_fixup_scripts(fixup_script_path) - s.move(library / "scripts" / fixup_script_path) - -for library in s.get_staging_dirs(): - s.replace( - library / "google/cloud/bundle/types/bundle.py", - "from google.firestore.v1 import document_pb2 # type: ignore\n" - "from google.firestore.v1 import query_pb2 # type: ignore", - "from google.cloud.firestore_v1.types import document as document_pb2 # type: ignore\n" - "from google.cloud.firestore_v1.types import query as query_pb2 # type: ignore" - ) - - s.replace( - library / "google/cloud/bundle/__init__.py", - "from .types.bundle import BundleMetadata\n" - "from .types.bundle import NamedQuery\n", - "from .types.bundle import BundleMetadata\n" - "from .types.bundle import NamedQuery\n" - "\n" - "from .bundle import FirestoreBundle\n", - ) - - s.replace( - library / "google/cloud/bundle/__init__.py", - "from google.cloud.bundle import gapic_version as 
package_version\n", - "from google.cloud.firestore_bundle import gapic_version as package_version\n", - ) - - s.replace( - library / "google/cloud/bundle/__init__.py", - "\'BundledQuery\',", - "\"BundledQuery\",\n\"FirestoreBundle\",",) - - s.move( - library / f"google/cloud/bundle", - f"google/cloud/firestore_bundle", - excludes=["noxfile.py"], - ) - s.move(library / f"tests", f"tests") - -s.remove_staging_dirs() - -# ---------------------------------------------------------------------------- -# Add templated files -# ---------------------------------------------------------------------------- -templated_files = common.py_library( - samples=False, # set to True only if there are samples - unit_test_external_dependencies=["aiounittest", "six", "freezegun"], - system_test_external_dependencies=["pytest-asyncio", "six"], - microgenerator=True, - cov_level=100, - split_system_tests=True, - default_python_version="3.14", - system_test_python_versions=["3.14"], - unit_test_python_versions=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"], -) - -s.move(templated_files, - excludes=[".github/**", ".kokoro/**", "renovate.json"]) - -python.py_samples(skip_readmes=True) - -s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/samples/AUTHORING_GUIDE.md b/samples/AUTHORING_GUIDE.md deleted file mode 100644 index 8249522ffc..0000000000 --- a/samples/AUTHORING_GUIDE.md +++ /dev/null @@ -1 +0,0 @@ -See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/AUTHORING_GUIDE.md \ No newline at end of file diff --git a/samples/CONTRIBUTING.md b/samples/CONTRIBUTING.md deleted file mode 100644 index f5fe2e6baf..0000000000 --- a/samples/CONTRIBUTING.md +++ /dev/null @@ -1 +0,0 @@ -See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/CONTRIBUTING.md \ No newline at end of file diff --git a/scripts/fixup_firestore_admin_v1_keywords.py b/scripts/fixup_firestore_admin_v1_keywords.py deleted file mode 100644 index 
05bd87f0e2..0000000000 --- a/scripts/fixup_firestore_admin_v1_keywords.py +++ /dev/null @@ -1,213 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -try: - import libcst as cst -except ImportError: - raise ImportError('Run `python -m pip install "libcst >= 0.2.5"` to install libcst.') - - - -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class firestore_adminCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'bulk_delete_documents': ('name', 'collection_ids', 'namespace_ids', ), - 'clone_database': ('parent', 'database_id', 'pitr_snapshot', 'encryption_config', 'tags', ), - 'create_backup_schedule': ('parent', 'backup_schedule', ), - 'create_database': ('parent', 'database', 'database_id', ), - 'create_index': ('parent', 'index', ), - 'create_user_creds': ('parent', 'user_creds', 'user_creds_id', ), - 'delete_backup': ('name', ), - 'delete_backup_schedule': ('name', ), - 'delete_database': ('name', 'etag', ), - 
'delete_index': ('name', ), - 'delete_user_creds': ('name', ), - 'disable_user_creds': ('name', ), - 'enable_user_creds': ('name', ), - 'export_documents': ('name', 'collection_ids', 'output_uri_prefix', 'namespace_ids', 'snapshot_time', ), - 'get_backup': ('name', ), - 'get_backup_schedule': ('name', ), - 'get_database': ('name', ), - 'get_field': ('name', ), - 'get_index': ('name', ), - 'get_user_creds': ('name', ), - 'import_documents': ('name', 'collection_ids', 'input_uri_prefix', 'namespace_ids', ), - 'list_backups': ('parent', 'filter', ), - 'list_backup_schedules': ('parent', ), - 'list_databases': ('parent', 'show_deleted', ), - 'list_fields': ('parent', 'filter', 'page_size', 'page_token', ), - 'list_indexes': ('parent', 'filter', 'page_size', 'page_token', ), - 'list_user_creds': ('parent', ), - 'reset_user_password': ('name', ), - 'restore_database': ('parent', 'database_id', 'backup', 'encryption_config', 'tags', ), - 'update_backup_schedule': ('backup_schedule', 'update_mask', ), - 'update_database': ('database', 'update_mask', ), - 'update_field': ('field', 'update_mask', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. 
- return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=firestore_adminCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the firestore_admin client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. 
- -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/scripts/fixup_firestore_v1_keywords.py b/scripts/fixup_firestore_v1_keywords.py deleted file mode 100644 index 6481e76bb7..0000000000 --- a/scripts/fixup_firestore_v1_keywords.py +++ /dev/null @@ -1,197 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -try: - import libcst as cst -except ImportError: - raise ImportError('Run `python -m pip install "libcst >= 0.2.5"` to install libcst.') - - - -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class firestoreCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'batch_get_documents': ('database', 'documents', 'mask', 'transaction', 'new_transaction', 'read_time', ), - 'batch_write': ('database', 'writes', 'labels', ), - 'begin_transaction': ('database', 'options', ), - 'commit': ('database', 'writes', 'transaction', ), - 'create_document': ('parent', 'collection_id', 'document', 'document_id', 'mask', ), - 'delete_document': ('name', 'current_document', ), - 'get_document': ('name', 'mask', 'transaction', 'read_time', ), - 'list_collection_ids': ('parent', 'page_size', 'page_token', 'read_time', ), - 'list_documents': ('parent', 'collection_id', 'page_size', 'page_token', 'order_by', 'mask', 'transaction', 'read_time', 'show_missing', ), - 'listen': ('database', 'add_target', 'remove_target', 'labels', ), - 'partition_query': ('parent', 'structured_query', 'partition_count', 
'page_token', 'page_size', 'read_time', ), - 'rollback': ('database', 'transaction', ), - 'run_aggregation_query': ('parent', 'structured_aggregation_query', 'transaction', 'new_transaction', 'read_time', 'explain_options', ), - 'run_query': ('parent', 'structured_query', 'transaction', 'new_transaction', 'read_time', 'explain_options', ), - 'update_document': ('document', 'update_mask', 'mask', 'current_document', ), - 'write': ('database', 'stream_id', 'writes', 'stream_token', 'labels', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. 
- for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=firestoreCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the firestore client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/setup.py b/setup.py deleted file mode 100644 index 8625abce96..0000000000 --- a/setup.py +++ /dev/null @@ -1,99 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import io -import os - -import setuptools - -# Package metadata. 
- -name = "google-cloud-firestore" -description = "Google Cloud Firestore API client library" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -version = {} -with open(os.path.join(package_root, "google/cloud/firestore/gapic_version.py")) as fp: - exec(fp.read(), version) -version = version["__version__"] -release_status = "Development Status :: 5 - Production/Stable" -dependencies = [ - "google-api-core[grpc] >= 1.34.0, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", - "google-cloud-core >= 1.4.1, <3.0.0", - "proto-plus >= 1.22.0, <2.0.0", - "proto-plus >= 1.22.2, <2.0.0; python_version>='3.11'", - "proto-plus >= 1.25.0, <2.0.0; python_version>='3.13'", - "protobuf>=3.20.2,<7.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -extras = {} - - -# Setup boilerplate below this line. - -package_root = os.path.abspath(os.path.dirname(__file__)) -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -# Only include packages under the 'google' namespace. Do not include tests, -# benchmarks, etc. 
-packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url="https://github.com/googleapis/python-firestore", - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Programming Language :: Python :: 3.14", - "Operating System :: OS Independent", - "Topic :: Internet", - "Topic :: Software Development :: Libraries :: Python Modules", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - install_requires=dependencies, - extras_require=extras, - python_requires=">=3.7", - scripts=[ - "scripts/fixup_firestore_v1_keywords.py", - "scripts/fixup_firestore_admin_v1_keywords.py", - ], - include_package_data=True, - zip_safe=False, -) diff --git a/tests/__init__.py b/tests/__init__.py deleted file mode 100644 index cbf94b283c..0000000000 --- a/tests/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py deleted file mode 100644 index cbf94b283c..0000000000 --- a/tests/unit/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py deleted file mode 100644 index cbf94b283c..0000000000 --- a/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/tests/unit/gapic/bundle/__init__.py b/tests/unit/gapic/bundle/__init__.py deleted file mode 100644 index cbf94b283c..0000000000 --- a/tests/unit/gapic/bundle/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/tests/unit/gapic/firestore_admin_v1/__init__.py b/tests/unit/gapic/firestore_admin_v1/__init__.py deleted file mode 100644 index cbf94b283c..0000000000 --- a/tests/unit/gapic/firestore_admin_v1/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py b/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py deleted file mode 100644 index 1b9184b8ae..0000000000 --- a/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py +++ /dev/null @@ -1,27083 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os - -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import 
grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.firestore_admin_v1.services.firestore_admin import ( - FirestoreAdminAsyncClient, -) -from google.cloud.firestore_admin_v1.services.firestore_admin import ( - FirestoreAdminClient, -) -from google.cloud.firestore_admin_v1.services.firestore_admin import pagers -from google.cloud.firestore_admin_v1.services.firestore_admin import transports -from google.cloud.firestore_admin_v1.types import backup -from google.cloud.firestore_admin_v1.types import database -from google.cloud.firestore_admin_v1.types import database as gfa_database -from google.cloud.firestore_admin_v1.types import field -from google.cloud.firestore_admin_v1.types import field as gfa_field -from google.cloud.firestore_admin_v1.types import firestore_admin -from google.cloud.firestore_admin_v1.types import index -from google.cloud.firestore_admin_v1.types import index as gfa_index -from google.cloud.firestore_admin_v1.types import operation as gfa_operation -from google.cloud.firestore_admin_v1.types import schedule -from google.cloud.firestore_admin_v1.types import snapshot -from google.cloud.firestore_admin_v1.types import user_creds -from google.cloud.firestore_admin_v1.types import user_creds as gfa_user_creds -from google.cloud.location import locations_pb2 -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: 
ignore -from google.type import dayofweek_pb2 # type: ignore -import google.auth - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return ( - "foo.googleapis.com" - if ("localhost" in client.DEFAULT_ENDPOINT) - else client.DEFAULT_ENDPOINT - ) - - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint_template(client): - return ( - "test.{UNIVERSE_DOMAIN}" - if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) - else client._DEFAULT_ENDPOINT_TEMPLATE - ) - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert FirestoreAdminClient._get_default_mtls_endpoint(None) is None - assert ( - FirestoreAdminClient._get_default_mtls_endpoint(api_endpoint) - == api_mtls_endpoint - ) - assert ( - FirestoreAdminClient._get_default_mtls_endpoint(api_mtls_endpoint) - == api_mtls_endpoint - ) - assert ( - FirestoreAdminClient._get_default_mtls_endpoint(sandbox_endpoint) - == sandbox_mtls_endpoint - ) - assert ( - FirestoreAdminClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) - == sandbox_mtls_endpoint - ) - assert ( - FirestoreAdminClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - ) - - -def test__read_environment_variables(): - assert FirestoreAdminClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert FirestoreAdminClient._read_environment_variables() == ( - True, - "auto", - None, - ) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert FirestoreAdminClient._read_environment_variables() == ( - False, - "auto", - None, - ) - - with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} - ): - with pytest.raises(ValueError) as excinfo: - FirestoreAdminClient._read_environment_variables() - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert 
FirestoreAdminClient._read_environment_variables() == ( - False, - "never", - None, - ) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert FirestoreAdminClient._read_environment_variables() == ( - False, - "always", - None, - ) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert FirestoreAdminClient._read_environment_variables() == ( - False, - "auto", - None, - ) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - FirestoreAdminClient._read_environment_variables() - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - ) - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert FirestoreAdminClient._read_environment_variables() == ( - False, - "auto", - "foo.com", - ) - - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert FirestoreAdminClient._get_client_cert_source(None, False) is None - assert ( - FirestoreAdminClient._get_client_cert_source(mock_provided_cert_source, False) - is None - ) - assert ( - FirestoreAdminClient._get_client_cert_source(mock_provided_cert_source, True) - == mock_provided_cert_source - ) - - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", return_value=True - ): - with mock.patch( - "google.auth.transport.mtls.default_client_cert_source", - return_value=mock_default_cert_source, - ): - assert ( - FirestoreAdminClient._get_client_cert_source(None, True) - is mock_default_cert_source - ) - assert ( - FirestoreAdminClient._get_client_cert_source( - mock_provided_cert_source, "true" - ) - is mock_provided_cert_source - ) - - -@mock.patch.object( - FirestoreAdminClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(FirestoreAdminClient), -) 
-@mock.patch.object( - FirestoreAdminAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(FirestoreAdminAsyncClient), -) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = FirestoreAdminClient._DEFAULT_UNIVERSE - default_endpoint = FirestoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=default_universe - ) - mock_universe = "bar.com" - mock_endpoint = FirestoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=mock_universe - ) - - assert ( - FirestoreAdminClient._get_api_endpoint( - api_override, mock_client_cert_source, default_universe, "always" - ) - == api_override - ) - assert ( - FirestoreAdminClient._get_api_endpoint( - None, mock_client_cert_source, default_universe, "auto" - ) - == FirestoreAdminClient.DEFAULT_MTLS_ENDPOINT - ) - assert ( - FirestoreAdminClient._get_api_endpoint(None, None, default_universe, "auto") - == default_endpoint - ) - assert ( - FirestoreAdminClient._get_api_endpoint(None, None, default_universe, "always") - == FirestoreAdminClient.DEFAULT_MTLS_ENDPOINT - ) - assert ( - FirestoreAdminClient._get_api_endpoint( - None, mock_client_cert_source, default_universe, "always" - ) - == FirestoreAdminClient.DEFAULT_MTLS_ENDPOINT - ) - assert ( - FirestoreAdminClient._get_api_endpoint(None, None, mock_universe, "never") - == mock_endpoint - ) - assert ( - FirestoreAdminClient._get_api_endpoint(None, None, default_universe, "never") - == default_endpoint - ) - - with pytest.raises(MutualTLSChannelError) as excinfo: - FirestoreAdminClient._get_api_endpoint( - None, mock_client_cert_source, mock_universe, "auto" - ) - assert ( - str(excinfo.value) - == "mTLS is not supported in any universe other than googleapis.com." 
- ) - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert ( - FirestoreAdminClient._get_universe_domain( - client_universe_domain, universe_domain_env - ) - == client_universe_domain - ) - assert ( - FirestoreAdminClient._get_universe_domain(None, universe_domain_env) - == universe_domain_env - ) - assert ( - FirestoreAdminClient._get_universe_domain(None, None) - == FirestoreAdminClient._DEFAULT_UNIVERSE - ) - - with pytest.raises(ValueError) as excinfo: - FirestoreAdminClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." - - -@pytest.mark.parametrize( - "error_code,cred_info_json,show_cred_info", - [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False), - ], -) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = FirestoreAdminClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - - -@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = FirestoreAdminClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - - -@pytest.mark.parametrize( - 
"client_class,transport_name", - [ - (FirestoreAdminClient, "grpc"), - (FirestoreAdminAsyncClient, "grpc_asyncio"), - (FirestoreAdminClient, "rest"), - ], -) -def test_firestore_admin_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object( - service_account.Credentials, "from_service_account_info" - ) as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - "firestore.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://firestore.googleapis.com" - ) - - -@pytest.mark.parametrize( - "transport_class,transport_name", - [ - (transports.FirestoreAdminGrpcTransport, "grpc"), - (transports.FirestoreAdminGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.FirestoreAdminRestTransport, "rest"), - ], -) -def test_firestore_admin_client_service_account_always_use_jwt( - transport_class, transport_name -): - with mock.patch.object( - service_account.Credentials, "with_always_use_jwt_access", create=True - ) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object( - service_account.Credentials, "with_always_use_jwt_access", create=True - ) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize( - "client_class,transport_name", - [ - (FirestoreAdminClient, "grpc"), - (FirestoreAdminAsyncClient, "grpc_asyncio"), - (FirestoreAdminClient, "rest"), - ], -) -def test_firestore_admin_client_from_service_account_file(client_class, transport_name): - 
creds = ga_credentials.AnonymousCredentials() - with mock.patch.object( - service_account.Credentials, "from_service_account_file" - ) as factory: - factory.return_value = creds - client = client_class.from_service_account_file( - "dummy/file/path.json", transport=transport_name - ) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json( - "dummy/file/path.json", transport=transport_name - ) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - "firestore.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://firestore.googleapis.com" - ) - - -def test_firestore_admin_client_get_transport_class(): - transport = FirestoreAdminClient.get_transport_class() - available_transports = [ - transports.FirestoreAdminGrpcTransport, - transports.FirestoreAdminRestTransport, - ] - assert transport in available_transports - - transport = FirestoreAdminClient.get_transport_class("grpc") - assert transport == transports.FirestoreAdminGrpcTransport - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (FirestoreAdminClient, transports.FirestoreAdminGrpcTransport, "grpc"), - ( - FirestoreAdminAsyncClient, - transports.FirestoreAdminGrpcAsyncIOTransport, - "grpc_asyncio", - ), - (FirestoreAdminClient, transports.FirestoreAdminRestTransport, "rest"), - ], -) -@mock.patch.object( - FirestoreAdminClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(FirestoreAdminClient), -) -@mock.patch.object( - FirestoreAdminAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(FirestoreAdminAsyncClient), -) -def test_firestore_admin_client_client_options( - client_class, transport_class, transport_name -): - # Check that if channel is provided we won't create a new one. 
- with mock.patch.object(FirestoreAdminClient, "get_transport_class") as gtc: - transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(FirestoreAdminClient, "get_transport_class") as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - ) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} - ): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions( - api_audience="https://language.googleapis.com" - ) - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com", - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,use_client_cert_env", - [ - (FirestoreAdminClient, transports.FirestoreAdminGrpcTransport, "grpc", "true"), - ( - FirestoreAdminAsyncClient, - transports.FirestoreAdminGrpcAsyncIOTransport, - "grpc_asyncio", - "true", - ), - (FirestoreAdminClient, 
transports.FirestoreAdminGrpcTransport, "grpc", "false"), - ( - FirestoreAdminAsyncClient, - transports.FirestoreAdminGrpcAsyncIOTransport, - "grpc_asyncio", - "false", - ), - (FirestoreAdminClient, transports.FirestoreAdminRestTransport, "rest", "true"), - (FirestoreAdminClient, transports.FirestoreAdminRestTransport, "rest", "false"), - ], -) -@mock.patch.object( - FirestoreAdminClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(FirestoreAdminClient), -) -@mock.patch.object( - FirestoreAdminAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(FirestoreAdminAsyncClient), -) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_firestore_admin_client_mtls_env_auto( - client_class, transport_class, transport_name, use_client_cert_env -): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - options = client_options.ClientOptions( - client_cert_source=client_cert_source_callback - ) - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=True, - ): - with mock.patch( - "google.auth.transport.mtls.default_client_cert_source", - return_value=client_cert_source_callback, - ): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=False, - ): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class", [FirestoreAdminClient, FirestoreAdminAsyncClient] -) -@mock.patch.object( - FirestoreAdminClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(FirestoreAdminClient), -) -@mock.patch.object( - FirestoreAdminAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(FirestoreAdminAsyncClient), -) -def test_firestore_admin_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint - ) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( - options - ) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint - ) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( - options - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=False, - ): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=True, - ): - with mock.patch( - "google.auth.transport.mtls.default_client_cert_source", - return_value=mock_client_cert_source, - ): - ( - api_endpoint, - cert_source, - ) = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - ) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} - ): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - - -@pytest.mark.parametrize( - "client_class", [FirestoreAdminClient, FirestoreAdminAsyncClient] -) -@mock.patch.object( - FirestoreAdminClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(FirestoreAdminClient), -) -@mock.patch.object( - FirestoreAdminAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(FirestoreAdminAsyncClient), -) -def test_firestore_admin_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = FirestoreAdminClient._DEFAULT_UNIVERSE - default_endpoint = FirestoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=default_universe - ) - mock_universe = "bar.com" - mock_endpoint = FirestoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=mock_universe - ) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ): - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=api_override - ) - client = client_class( - client_options=options, - credentials=ga_credentials.AnonymousCredentials(), - ) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - else: - client = client_class( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - assert client.api_endpoint == ( - mock_endpoint if universe_exists else default_endpoint - ) - assert client.universe_domain == ( - mock_universe if universe_exists else default_universe - ) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
- options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (FirestoreAdminClient, transports.FirestoreAdminGrpcTransport, "grpc"), - ( - FirestoreAdminAsyncClient, - transports.FirestoreAdminGrpcAsyncIOTransport, - "grpc_asyncio", - ), - (FirestoreAdminClient, transports.FirestoreAdminRestTransport, "rest"), - ], -) -def test_firestore_admin_client_client_options_scopes( - client_class, transport_class, transport_name -): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - ( - FirestoreAdminClient, - transports.FirestoreAdminGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - FirestoreAdminAsyncClient, - transports.FirestoreAdminGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - (FirestoreAdminClient, transports.FirestoreAdminRestTransport, "rest", None), - ], -) -def test_firestore_admin_client_client_options_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # 
Check the case credentials file is provided. - options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -def test_firestore_admin_client_client_options_from_dict(): - with mock.patch( - "google.cloud.firestore_admin_v1.services.firestore_admin.transports.FirestoreAdminGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = FirestoreAdminClient( - client_options={"api_endpoint": "squid.clam.whelk"} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - ( - FirestoreAdminClient, - transports.FirestoreAdminGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - FirestoreAdminAsyncClient, - transports.FirestoreAdminGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - ], -) -def test_firestore_admin_client_create_channel_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "firestore.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", - ), - scopes=None, - default_host="firestore.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.CreateIndexRequest, - dict, - ], -) -def test_create_index(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as 
the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_index), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_index(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.CreateIndexRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_index_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.CreateIndexRequest( - parent="parent_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_index), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.create_index(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.CreateIndexRequest( - parent="parent_value", - ) - - -def test_create_index_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_index in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.create_index] = mock_rpc - request = {} - client.create_index(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_index(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_create_index_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.create_index - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_index - ] = mock_rpc - - request = {} - await client.create_index(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_index(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_create_index_async( - transport: str = "grpc_asyncio", request_type=firestore_admin.CreateIndexRequest -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_index), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.create_index(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.CreateIndexRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_index_async_from_dict(): - await test_create_index_async(request_type=dict) - - -def test_create_index_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.CreateIndexRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_index), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_index(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_create_index_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.CreateIndexRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_index), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.create_index(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -def test_create_index_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_index), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_index( - parent="parent_value", - index=gfa_index.Index(name="name_value"), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].index - mock_val = gfa_index.Index(name="name_value") - assert arg == mock_val - - -def test_create_index_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_index( - firestore_admin.CreateIndexRequest(), - parent="parent_value", - index=gfa_index.Index(name="name_value"), - ) - - -@pytest.mark.asyncio -async def test_create_index_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_index), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_index( - parent="parent_value", - index=gfa_index.Index(name="name_value"), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].index - mock_val = gfa_index.Index(name="name_value") - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_index_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_index( - firestore_admin.CreateIndexRequest(), - parent="parent_value", - index=gfa_index.Index(name="name_value"), - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.ListIndexesRequest, - dict, - ], -) -def test_list_indexes(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = firestore_admin.ListIndexesResponse( - next_page_token="next_page_token_value", - ) - response = client.list_indexes(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.ListIndexesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListIndexesPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_indexes_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.ListIndexesRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.list_indexes(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListIndexesRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - -def test_list_indexes_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_indexes in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.list_indexes] = mock_rpc - request = {} - client.list_indexes(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_indexes(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_list_indexes_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.list_indexes - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_indexes - ] = mock_rpc - - request = {} - await client.list_indexes(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_indexes(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_list_indexes_async( - transport: str = "grpc_asyncio", request_type=firestore_admin.ListIndexesRequest -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore_admin.ListIndexesResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_indexes(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.ListIndexesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListIndexesAsyncPager) - assert response.next_page_token == "next_page_token_value" - - -@pytest.mark.asyncio -async def test_list_indexes_async_from_dict(): - await test_list_indexes_async(request_type=dict) - - -def test_list_indexes_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.ListIndexesRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: - call.return_value = firestore_admin.ListIndexesResponse() - client.list_indexes(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_indexes_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.ListIndexesRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore_admin.ListIndexesResponse() - ) - await client.list_indexes(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -def test_list_indexes_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = firestore_admin.ListIndexesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_indexes( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -def test_list_indexes_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_indexes( - firestore_admin.ListIndexesRequest(), - parent="parent_value", - ) - - -@pytest.mark.asyncio -async def test_list_indexes_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = firestore_admin.ListIndexesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore_admin.ListIndexesResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_indexes( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_list_indexes_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_indexes( - firestore_admin.ListIndexesRequest(), - parent="parent_value", - ) - - -def test_list_indexes_pager(transport_name: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - firestore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - index.Index(), - index.Index(), - ], - next_page_token="abc", - ), - firestore_admin.ListIndexesResponse( - indexes=[], - next_page_token="def", - ), - firestore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - ], - next_page_token="ghi", - ), - firestore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - index.Index(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_indexes(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, index.Index) for i in results) - - -def test_list_indexes_pages(transport_name: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - firestore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - index.Index(), - index.Index(), - ], - next_page_token="abc", - ), - firestore_admin.ListIndexesResponse( - indexes=[], - next_page_token="def", - ), - firestore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - ], - next_page_token="ghi", - ), - firestore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - index.Index(), - ], - ), - RuntimeError, - ) - pages = list(client.list_indexes(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_indexes_async_pager(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_indexes), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - firestore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - index.Index(), - index.Index(), - ], - next_page_token="abc", - ), - firestore_admin.ListIndexesResponse( - indexes=[], - next_page_token="def", - ), - firestore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - ], - next_page_token="ghi", - ), - firestore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - index.Index(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_indexes( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, index.Index) for i in responses) - - -@pytest.mark.asyncio -async def test_list_indexes_async_pages(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_indexes), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - firestore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - index.Index(), - index.Index(), - ], - next_page_token="abc", - ), - firestore_admin.ListIndexesResponse( - indexes=[], - next_page_token="def", - ), - firestore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - ], - next_page_token="ghi", - ), - firestore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - index.Index(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_indexes(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.GetIndexRequest, - dict, - ], -) -def test_get_index(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_index), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = index.Index( - name="name_value", - query_scope=index.Index.QueryScope.COLLECTION, - api_scope=index.Index.ApiScope.DATASTORE_MODE_API, - state=index.Index.State.CREATING, - density=index.Index.Density.SPARSE_ALL, - multikey=True, - shard_count=1178, - ) - response = client.get_index(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.GetIndexRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, index.Index) - assert response.name == "name_value" - assert response.query_scope == index.Index.QueryScope.COLLECTION - assert response.api_scope == index.Index.ApiScope.DATASTORE_MODE_API - assert response.state == index.Index.State.CREATING - assert response.density == index.Index.Density.SPARSE_ALL - assert response.multikey is True - assert response.shard_count == 1178 - - -def test_get_index_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.GetIndexRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_index), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_index(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetIndexRequest( - name="name_value", - ) - - -def test_get_index_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_index in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_index] = mock_rpc - request = {} - client.get_index(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_index(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_get_index_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.get_index - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_index - ] = mock_rpc - - request = {} - await client.get_index(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_index(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_get_index_async( - transport: str = "grpc_asyncio", request_type=firestore_admin.GetIndexRequest -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_index), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - index.Index( - name="name_value", - query_scope=index.Index.QueryScope.COLLECTION, - api_scope=index.Index.ApiScope.DATASTORE_MODE_API, - state=index.Index.State.CREATING, - density=index.Index.Density.SPARSE_ALL, - multikey=True, - shard_count=1178, - ) - ) - response = await client.get_index(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.GetIndexRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, index.Index) - assert response.name == "name_value" - assert response.query_scope == index.Index.QueryScope.COLLECTION - assert response.api_scope == index.Index.ApiScope.DATASTORE_MODE_API - assert response.state == index.Index.State.CREATING - assert response.density == index.Index.Density.SPARSE_ALL - assert response.multikey is True - assert response.shard_count == 1178 - - -@pytest.mark.asyncio -async def test_get_index_async_from_dict(): - await test_get_index_async(request_type=dict) - - -def test_get_index_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.GetIndexRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_index), "__call__") as call: - call.return_value = index.Index() - client.get_index(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_index_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.GetIndexRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_index), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(index.Index()) - await client.get_index(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_get_index_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_index), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = index.Index() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_index( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_get_index_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_index( - firestore_admin.GetIndexRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_get_index_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_index), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = index.Index() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(index.Index()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_index( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_get_index_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_index( - firestore_admin.GetIndexRequest(), - name="name_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.DeleteIndexRequest, - dict, - ], -) -def test_delete_index(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_index), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_index(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.DeleteIndexRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_index_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.DeleteIndexRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_index), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_index(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.DeleteIndexRequest( - name="name_value", - ) - - -def test_delete_index_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_index in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.delete_index] = mock_rpc - request = {} - client.delete_index(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_index(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_delete_index_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.delete_index - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_index - ] = mock_rpc - - request = {} - await client.delete_index(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_index(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_delete_index_async( - transport: str = "grpc_asyncio", request_type=firestore_admin.DeleteIndexRequest -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_index), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_index(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.DeleteIndexRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_index_async_from_dict(): - await test_delete_index_async(request_type=dict) - - -def test_delete_index_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.DeleteIndexRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_index), "__call__") as call: - call.return_value = None - client.delete_index(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_delete_index_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.DeleteIndexRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_index), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_index(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_delete_index_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_index), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_index( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_delete_index_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_index( - firestore_admin.DeleteIndexRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_delete_index_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_index), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_index( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_delete_index_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_index( - firestore_admin.DeleteIndexRequest(), - name="name_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.GetFieldRequest, - dict, - ], -) -def test_get_field(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_field), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = field.Field( - name="name_value", - ) - response = client.get_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.GetFieldRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, field.Field) - assert response.name == "name_value" - - -def test_get_field_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.GetFieldRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_field), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_field(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetFieldRequest( - name="name_value", - ) - - -def test_get_field_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_field in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client._transport._wrapped_methods[client._transport.get_field] = mock_rpc - request = {} - client.get_field(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_field(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_get_field_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.get_field - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_field - ] = mock_rpc - - request = {} - await client.get_field(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.get_field(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_get_field_async( - transport: str = "grpc_asyncio", request_type=firestore_admin.GetFieldRequest -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_field), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - field.Field( - name="name_value", - ) - ) - response = await client.get_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.GetFieldRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, field.Field) - assert response.name == "name_value" - - -@pytest.mark.asyncio -async def test_get_field_async_from_dict(): - await test_get_field_async(request_type=dict) - - -def test_get_field_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.GetFieldRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_field), "__call__") as call: - call.return_value = field.Field() - client.get_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_field_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.GetFieldRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_field), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(field.Field()) - await client.get_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_get_field_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_field), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = field.Field() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get_field( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_get_field_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_field( - firestore_admin.GetFieldRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_get_field_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_field), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = field.Field() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(field.Field()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_field( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_get_field_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_field( - firestore_admin.GetFieldRequest(), - name="name_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.UpdateFieldRequest, - dict, - ], -) -def test_update_field(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_field), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.UpdateFieldRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_field_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.UpdateFieldRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_field), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_field(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.UpdateFieldRequest() - - -def test_update_field_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_field in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.update_field] = mock_rpc - request = {} - client.update_field(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_field(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_update_field_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.update_field - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.update_field - ] = mock_rpc - - request = {} - await client.update_field(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_field(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_update_field_async( - transport: str = "grpc_asyncio", request_type=firestore_admin.UpdateFieldRequest -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_field), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.update_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.UpdateFieldRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_field_async_from_dict(): - await test_update_field_async(request_type=dict) - - -def test_update_field_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.UpdateFieldRequest() - - request.field.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_field), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "field.name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_update_field_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.UpdateFieldRequest() - - request.field.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_field), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.update_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "field.name=name_value", - ) in kw["metadata"] - - -def test_update_field_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_field), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_field( - field=gfa_field.Field(name="name_value"), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].field - mock_val = gfa_field.Field(name="name_value") - assert arg == mock_val - - -def test_update_field_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_field( - firestore_admin.UpdateFieldRequest(), - field=gfa_field.Field(name="name_value"), - ) - - -@pytest.mark.asyncio -async def test_update_field_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_field), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_field( - field=gfa_field.Field(name="name_value"), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].field - mock_val = gfa_field.Field(name="name_value") - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_field_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_field( - firestore_admin.UpdateFieldRequest(), - field=gfa_field.Field(name="name_value"), - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.ListFieldsRequest, - dict, - ], -) -def test_list_fields(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_fields), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = firestore_admin.ListFieldsResponse( - next_page_token="next_page_token_value", - ) - response = client.list_fields(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.ListFieldsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListFieldsPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_fields_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.ListFieldsRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_fields), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_fields(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListFieldsRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - -def test_list_fields_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_fields in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.list_fields] = mock_rpc - request = {} - client.list_fields(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_fields(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_list_fields_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.list_fields - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_fields - ] = mock_rpc - - request = {} - await client.list_fields(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_fields(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_list_fields_async( - transport: str = "grpc_asyncio", request_type=firestore_admin.ListFieldsRequest -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_fields), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore_admin.ListFieldsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_fields(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.ListFieldsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListFieldsAsyncPager) - assert response.next_page_token == "next_page_token_value" - - -@pytest.mark.asyncio -async def test_list_fields_async_from_dict(): - await test_list_fields_async(request_type=dict) - - -def test_list_fields_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.ListFieldsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_fields), "__call__") as call: - call.return_value = firestore_admin.ListFieldsResponse() - client.list_fields(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_fields_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.ListFieldsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_fields), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore_admin.ListFieldsResponse() - ) - await client.list_fields(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -def test_list_fields_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_fields), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = firestore_admin.ListFieldsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_fields( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -def test_list_fields_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_fields( - firestore_admin.ListFieldsRequest(), - parent="parent_value", - ) - - -@pytest.mark.asyncio -async def test_list_fields_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_fields), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = firestore_admin.ListFieldsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore_admin.ListFieldsResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_fields( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_list_fields_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_fields( - firestore_admin.ListFieldsRequest(), - parent="parent_value", - ) - - -def test_list_fields_pager(transport_name: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_fields), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - firestore_admin.ListFieldsResponse( - fields=[ - field.Field(), - field.Field(), - field.Field(), - ], - next_page_token="abc", - ), - firestore_admin.ListFieldsResponse( - fields=[], - next_page_token="def", - ), - firestore_admin.ListFieldsResponse( - fields=[ - field.Field(), - ], - next_page_token="ghi", - ), - firestore_admin.ListFieldsResponse( - fields=[ - field.Field(), - field.Field(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_fields(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, field.Field) for i in results) - - -def test_list_fields_pages(transport_name: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_fields), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - firestore_admin.ListFieldsResponse( - fields=[ - field.Field(), - field.Field(), - field.Field(), - ], - next_page_token="abc", - ), - firestore_admin.ListFieldsResponse( - fields=[], - next_page_token="def", - ), - firestore_admin.ListFieldsResponse( - fields=[ - field.Field(), - ], - next_page_token="ghi", - ), - firestore_admin.ListFieldsResponse( - fields=[ - field.Field(), - field.Field(), - ], - ), - RuntimeError, - ) - pages = list(client.list_fields(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_fields_async_pager(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_fields), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - firestore_admin.ListFieldsResponse( - fields=[ - field.Field(), - field.Field(), - field.Field(), - ], - next_page_token="abc", - ), - firestore_admin.ListFieldsResponse( - fields=[], - next_page_token="def", - ), - firestore_admin.ListFieldsResponse( - fields=[ - field.Field(), - ], - next_page_token="ghi", - ), - firestore_admin.ListFieldsResponse( - fields=[ - field.Field(), - field.Field(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_fields( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, field.Field) for i in responses) - - -@pytest.mark.asyncio -async def test_list_fields_async_pages(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_fields), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - firestore_admin.ListFieldsResponse( - fields=[ - field.Field(), - field.Field(), - field.Field(), - ], - next_page_token="abc", - ), - firestore_admin.ListFieldsResponse( - fields=[], - next_page_token="def", - ), - firestore_admin.ListFieldsResponse( - fields=[ - field.Field(), - ], - next_page_token="ghi", - ), - firestore_admin.ListFieldsResponse( - fields=[ - field.Field(), - field.Field(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_fields(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.ExportDocumentsRequest, - dict, - ], -) -def test_export_documents(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.export_documents), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.export_documents(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.ExportDocumentsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_export_documents_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.ExportDocumentsRequest( - name="name_value", - output_uri_prefix="output_uri_prefix_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.export_documents), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.export_documents(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ExportDocumentsRequest( - name="name_value", - output_uri_prefix="output_uri_prefix_value", - ) - - -def test_export_documents_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.export_documents in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.export_documents - ] = mock_rpc - request = {} - client.export_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.export_documents(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_export_documents_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.export_documents - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.export_documents - ] = mock_rpc - - request = {} - await client.export_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.export_documents(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_export_documents_async( - transport: str = "grpc_asyncio", request_type=firestore_admin.ExportDocumentsRequest -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.export_documents), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.export_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.ExportDocumentsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_export_documents_async_from_dict(): - await test_export_documents_async(request_type=dict) - - -def test_export_documents_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.ExportDocumentsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.export_documents), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.export_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_export_documents_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.ExportDocumentsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.export_documents), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.export_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_export_documents_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.export_documents), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.export_documents( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_export_documents_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.export_documents( - firestore_admin.ExportDocumentsRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_export_documents_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.export_documents), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.export_documents( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_export_documents_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.export_documents( - firestore_admin.ExportDocumentsRequest(), - name="name_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.ImportDocumentsRequest, - dict, - ], -) -def test_import_documents(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.import_documents), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.import_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.ImportDocumentsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_import_documents_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.ImportDocumentsRequest( - name="name_value", - input_uri_prefix="input_uri_prefix_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.import_documents), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.import_documents(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ImportDocumentsRequest( - name="name_value", - input_uri_prefix="input_uri_prefix_value", - ) - - -def test_import_documents_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.import_documents in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.import_documents - ] = mock_rpc - request = {} - client.import_documents(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.import_documents(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_import_documents_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.import_documents - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.import_documents - ] = mock_rpc - - request = {} - await client.import_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.import_documents(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_import_documents_async( - transport: str = "grpc_asyncio", request_type=firestore_admin.ImportDocumentsRequest -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.import_documents), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.import_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.ImportDocumentsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_import_documents_async_from_dict(): - await test_import_documents_async(request_type=dict) - - -def test_import_documents_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.ImportDocumentsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.import_documents), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.import_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_import_documents_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.ImportDocumentsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.import_documents), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.import_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_import_documents_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.import_documents), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.import_documents( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_import_documents_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.import_documents( - firestore_admin.ImportDocumentsRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_import_documents_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.import_documents), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.import_documents( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_import_documents_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.import_documents( - firestore_admin.ImportDocumentsRequest(), - name="name_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.BulkDeleteDocumentsRequest, - dict, - ], -) -def test_bulk_delete_documents(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.bulk_delete_documents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.bulk_delete_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.BulkDeleteDocumentsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_bulk_delete_documents_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.BulkDeleteDocumentsRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.bulk_delete_documents), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.bulk_delete_documents(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.BulkDeleteDocumentsRequest( - name="name_value", - ) - - -def test_bulk_delete_documents_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.bulk_delete_documents - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.bulk_delete_documents - ] = mock_rpc - request = {} - client.bulk_delete_documents(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.bulk_delete_documents(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_bulk_delete_documents_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.bulk_delete_documents - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.bulk_delete_documents - ] = mock_rpc - - request = {} - await client.bulk_delete_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.bulk_delete_documents(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_bulk_delete_documents_async( - transport: str = "grpc_asyncio", - request_type=firestore_admin.BulkDeleteDocumentsRequest, -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.bulk_delete_documents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.bulk_delete_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.BulkDeleteDocumentsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_bulk_delete_documents_async_from_dict(): - await test_bulk_delete_documents_async(request_type=dict) - - -def test_bulk_delete_documents_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.BulkDeleteDocumentsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.bulk_delete_documents), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.bulk_delete_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_bulk_delete_documents_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.BulkDeleteDocumentsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.bulk_delete_documents), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.bulk_delete_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_bulk_delete_documents_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.bulk_delete_documents), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.bulk_delete_documents( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_bulk_delete_documents_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.bulk_delete_documents( - firestore_admin.BulkDeleteDocumentsRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_bulk_delete_documents_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.bulk_delete_documents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.bulk_delete_documents( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_bulk_delete_documents_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.bulk_delete_documents( - firestore_admin.BulkDeleteDocumentsRequest(), - name="name_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.CreateDatabaseRequest, - dict, - ], -) -def test_create_database(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_database), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.CreateDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_database_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.CreateDatabaseRequest( - parent="parent_value", - database_id="database_id_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_database), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_database(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.CreateDatabaseRequest( - parent="parent_value", - database_id="database_id_value", - ) - - -def test_create_database_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.create_database] = mock_rpc - request = {} - client.create_database(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_create_database_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.create_database - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_database - ] = mock_rpc - - request = {} - await client.create_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_create_database_async( - transport: str = "grpc_asyncio", request_type=firestore_admin.CreateDatabaseRequest -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_database), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.create_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.CreateDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_database_async_from_dict(): - await test_create_database_async(request_type=dict) - - -def test_create_database_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.CreateDatabaseRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_database), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_create_database_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.CreateDatabaseRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_database), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.create_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -def test_create_database_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_database), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_database( - parent="parent_value", - database=gfa_database.Database(name="name_value"), - database_id="database_id_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].database - mock_val = gfa_database.Database(name="name_value") - assert arg == mock_val - arg = args[0].database_id - mock_val = "database_id_value" - assert arg == mock_val - - -def test_create_database_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_database( - firestore_admin.CreateDatabaseRequest(), - parent="parent_value", - database=gfa_database.Database(name="name_value"), - database_id="database_id_value", - ) - - -@pytest.mark.asyncio -async def test_create_database_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_database), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.create_database( - parent="parent_value", - database=gfa_database.Database(name="name_value"), - database_id="database_id_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].database - mock_val = gfa_database.Database(name="name_value") - assert arg == mock_val - arg = args[0].database_id - mock_val = "database_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_database_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_database( - firestore_admin.CreateDatabaseRequest(), - parent="parent_value", - database=gfa_database.Database(name="name_value"), - database_id="database_id_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.GetDatabaseRequest, - dict, - ], -) -def test_get_database(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_database), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = database.Database( - name="name_value", - uid="uid_value", - location_id="location_id_value", - type_=database.Database.DatabaseType.FIRESTORE_NATIVE, - concurrency_mode=database.Database.ConcurrencyMode.OPTIMISTIC, - point_in_time_recovery_enablement=database.Database.PointInTimeRecoveryEnablement.POINT_IN_TIME_RECOVERY_ENABLED, - app_engine_integration_mode=database.Database.AppEngineIntegrationMode.ENABLED, - key_prefix="key_prefix_value", - delete_protection_state=database.Database.DeleteProtectionState.DELETE_PROTECTION_DISABLED, - previous_id="previous_id_value", - free_tier=True, - etag="etag_value", - database_edition=database.Database.DatabaseEdition.STANDARD, - ) - response = client.get_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.GetDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, database.Database) - assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.location_id == "location_id_value" - assert response.type_ == database.Database.DatabaseType.FIRESTORE_NATIVE - assert response.concurrency_mode == database.Database.ConcurrencyMode.OPTIMISTIC - assert ( - response.point_in_time_recovery_enablement - == database.Database.PointInTimeRecoveryEnablement.POINT_IN_TIME_RECOVERY_ENABLED - ) - assert ( - response.app_engine_integration_mode - == database.Database.AppEngineIntegrationMode.ENABLED - ) - assert response.key_prefix == "key_prefix_value" - assert ( - response.delete_protection_state - == database.Database.DeleteProtectionState.DELETE_PROTECTION_DISABLED - ) - assert response.previous_id == "previous_id_value" - assert response.free_tier is True - assert response.etag == "etag_value" - assert response.database_edition == database.Database.DatabaseEdition.STANDARD - - -def test_get_database_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.GetDatabaseRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_database), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_database(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetDatabaseRequest( - name="name_value", - ) - - -def test_get_database_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_database] = mock_rpc - request = {} - client.get_database(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_get_database_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.get_database - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_database - ] = mock_rpc - - request = {} - await client.get_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_get_database_async( - transport: str = "grpc_asyncio", request_type=firestore_admin.GetDatabaseRequest -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_database), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - database.Database( - name="name_value", - uid="uid_value", - location_id="location_id_value", - type_=database.Database.DatabaseType.FIRESTORE_NATIVE, - concurrency_mode=database.Database.ConcurrencyMode.OPTIMISTIC, - point_in_time_recovery_enablement=database.Database.PointInTimeRecoveryEnablement.POINT_IN_TIME_RECOVERY_ENABLED, - app_engine_integration_mode=database.Database.AppEngineIntegrationMode.ENABLED, - key_prefix="key_prefix_value", - delete_protection_state=database.Database.DeleteProtectionState.DELETE_PROTECTION_DISABLED, - previous_id="previous_id_value", - free_tier=True, - etag="etag_value", - database_edition=database.Database.DatabaseEdition.STANDARD, - ) - ) - response = await client.get_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.GetDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, database.Database) - assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.location_id == "location_id_value" - assert response.type_ == database.Database.DatabaseType.FIRESTORE_NATIVE - assert response.concurrency_mode == database.Database.ConcurrencyMode.OPTIMISTIC - assert ( - response.point_in_time_recovery_enablement - == database.Database.PointInTimeRecoveryEnablement.POINT_IN_TIME_RECOVERY_ENABLED - ) - assert ( - response.app_engine_integration_mode - == database.Database.AppEngineIntegrationMode.ENABLED - ) - assert response.key_prefix == "key_prefix_value" - assert ( - response.delete_protection_state - == database.Database.DeleteProtectionState.DELETE_PROTECTION_DISABLED - ) - assert response.previous_id == "previous_id_value" - assert response.free_tier is True - assert response.etag == "etag_value" - assert response.database_edition == database.Database.DatabaseEdition.STANDARD - - -@pytest.mark.asyncio -async def test_get_database_async_from_dict(): - await test_get_database_async(request_type=dict) - - -def test_get_database_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.GetDatabaseRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_database), "__call__") as call: - call.return_value = database.Database() - client.get_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_database_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.GetDatabaseRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_database), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(database.Database()) - await client.get_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_get_database_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_database), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = database.Database() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_database( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_get_database_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_database( - firestore_admin.GetDatabaseRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_get_database_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_database), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = database.Database() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(database.Database()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_database( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_get_database_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_database( - firestore_admin.GetDatabaseRequest(), - name="name_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.ListDatabasesRequest, - dict, - ], -) -def test_list_databases(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_databases), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = firestore_admin.ListDatabasesResponse( - unreachable=["unreachable_value"], - ) - response = client.list_databases(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.ListDatabasesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore_admin.ListDatabasesResponse) - assert response.unreachable == ["unreachable_value"] - - -def test_list_databases_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = firestore_admin.ListDatabasesRequest( - parent="parent_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_databases), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_databases(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListDatabasesRequest( - parent="parent_value", - ) - - -def test_list_databases_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_databases in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.list_databases] = mock_rpc - request = {} - client.list_databases(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_databases(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_list_databases_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.list_databases - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_databases - ] = mock_rpc - - request = {} - await client.list_databases(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_databases(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_list_databases_async( - transport: str = "grpc_asyncio", request_type=firestore_admin.ListDatabasesRequest -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_databases), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore_admin.ListDatabasesResponse( - unreachable=["unreachable_value"], - ) - ) - response = await client.list_databases(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.ListDatabasesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore_admin.ListDatabasesResponse) - assert response.unreachable == ["unreachable_value"] - - -@pytest.mark.asyncio -async def test_list_databases_async_from_dict(): - await test_list_databases_async(request_type=dict) - - -def test_list_databases_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.ListDatabasesRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_databases), "__call__") as call: - call.return_value = firestore_admin.ListDatabasesResponse() - client.list_databases(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_databases_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.ListDatabasesRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_databases), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore_admin.ListDatabasesResponse() - ) - await client.list_databases(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -def test_list_databases_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_databases), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = firestore_admin.ListDatabasesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_databases( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -def test_list_databases_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_databases( - firestore_admin.ListDatabasesRequest(), - parent="parent_value", - ) - - -@pytest.mark.asyncio -async def test_list_databases_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_databases), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = firestore_admin.ListDatabasesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore_admin.ListDatabasesResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_databases( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_list_databases_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_databases( - firestore_admin.ListDatabasesRequest(), - parent="parent_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.UpdateDatabaseRequest, - dict, - ], -) -def test_update_database(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_database), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.UpdateDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_database_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.UpdateDatabaseRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_database), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_database(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.UpdateDatabaseRequest() - - -def test_update_database_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.update_database] = mock_rpc - request = {} - client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_update_database_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.update_database - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.update_database - ] = mock_rpc - - request = {} - await client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_update_database_async( - transport: str = "grpc_asyncio", request_type=firestore_admin.UpdateDatabaseRequest -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_database), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.UpdateDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_database_async_from_dict(): - await test_update_database_async(request_type=dict) - - -def test_update_database_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.UpdateDatabaseRequest() - - request.database.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_database), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "database.name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_update_database_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.UpdateDatabaseRequest() - - request.database.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_database), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "database.name=name_value", - ) in kw["metadata"] - - -def test_update_database_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_database), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_database( - database=gfa_database.Database(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = gfa_database.Database(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -def test_update_database_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_database( - firestore_admin.UpdateDatabaseRequest(), - database=gfa_database.Database(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -@pytest.mark.asyncio -async def test_update_database_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_database), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.update_database( - database=gfa_database.Database(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = gfa_database.Database(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_database_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_database( - firestore_admin.UpdateDatabaseRequest(), - database=gfa_database.Database(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.DeleteDatabaseRequest, - dict, - ], -) -def test_delete_database(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_database), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_database(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.DeleteDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_database_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.DeleteDatabaseRequest( - name="name_value", - etag="etag_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_database), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.delete_database(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.DeleteDatabaseRequest( - name="name_value", - etag="etag_value", - ) - - -def test_delete_database_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.delete_database] = mock_rpc - request = {} - client.delete_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_delete_database_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.delete_database - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_database - ] = mock_rpc - - request = {} - await client.delete_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_delete_database_async( - transport: str = "grpc_asyncio", request_type=firestore_admin.DeleteDatabaseRequest -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_database), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.delete_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.DeleteDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_database_async_from_dict(): - await test_delete_database_async(request_type=dict) - - -def test_delete_database_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.DeleteDatabaseRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_database), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_delete_database_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.DeleteDatabaseRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_database), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.delete_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_delete_database_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_database), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_database( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_delete_database_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_database( - firestore_admin.DeleteDatabaseRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_delete_database_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_database), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_database( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_delete_database_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_database( - firestore_admin.DeleteDatabaseRequest(), - name="name_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.CreateUserCredsRequest, - dict, - ], -) -def test_create_user_creds(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_user_creds), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = gfa_user_creds.UserCreds( - name="name_value", - state=gfa_user_creds.UserCreds.State.ENABLED, - secure_password="secure_password_value", - ) - response = client.create_user_creds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.CreateUserCredsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gfa_user_creds.UserCreds) - assert response.name == "name_value" - assert response.state == gfa_user_creds.UserCreds.State.ENABLED - assert response.secure_password == "secure_password_value" - - -def test_create_user_creds_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.CreateUserCredsRequest( - parent="parent_value", - user_creds_id="user_creds_id_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_user_creds), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.create_user_creds(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.CreateUserCredsRequest( - parent="parent_value", - user_creds_id="user_creds_id_value", - ) - - -def test_create_user_creds_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_user_creds in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.create_user_creds - ] = mock_rpc - request = {} - client.create_user_creds(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_user_creds(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_create_user_creds_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.create_user_creds - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_user_creds - ] = mock_rpc - - request = {} - await client.create_user_creds(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_user_creds(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_create_user_creds_async( - transport: str = "grpc_asyncio", request_type=firestore_admin.CreateUserCredsRequest -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_user_creds), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gfa_user_creds.UserCreds( - name="name_value", - state=gfa_user_creds.UserCreds.State.ENABLED, - secure_password="secure_password_value", - ) - ) - response = await client.create_user_creds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.CreateUserCredsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gfa_user_creds.UserCreds) - assert response.name == "name_value" - assert response.state == gfa_user_creds.UserCreds.State.ENABLED - assert response.secure_password == "secure_password_value" - - -@pytest.mark.asyncio -async def test_create_user_creds_async_from_dict(): - await test_create_user_creds_async(request_type=dict) - - -def test_create_user_creds_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.CreateUserCredsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_user_creds), "__call__" - ) as call: - call.return_value = gfa_user_creds.UserCreds() - client.create_user_creds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_create_user_creds_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.CreateUserCredsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_user_creds), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gfa_user_creds.UserCreds() - ) - await client.create_user_creds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -def test_create_user_creds_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_user_creds), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = gfa_user_creds.UserCreds() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_user_creds( - parent="parent_value", - user_creds=gfa_user_creds.UserCreds(name="name_value"), - user_creds_id="user_creds_id_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].user_creds - mock_val = gfa_user_creds.UserCreds(name="name_value") - assert arg == mock_val - arg = args[0].user_creds_id - mock_val = "user_creds_id_value" - assert arg == mock_val - - -def test_create_user_creds_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_user_creds( - firestore_admin.CreateUserCredsRequest(), - parent="parent_value", - user_creds=gfa_user_creds.UserCreds(name="name_value"), - user_creds_id="user_creds_id_value", - ) - - -@pytest.mark.asyncio -async def test_create_user_creds_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_user_creds), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = gfa_user_creds.UserCreds() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gfa_user_creds.UserCreds() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_user_creds( - parent="parent_value", - user_creds=gfa_user_creds.UserCreds(name="name_value"), - user_creds_id="user_creds_id_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].user_creds - mock_val = gfa_user_creds.UserCreds(name="name_value") - assert arg == mock_val - arg = args[0].user_creds_id - mock_val = "user_creds_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_user_creds_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_user_creds( - firestore_admin.CreateUserCredsRequest(), - parent="parent_value", - user_creds=gfa_user_creds.UserCreds(name="name_value"), - user_creds_id="user_creds_id_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.GetUserCredsRequest, - dict, - ], -) -def test_get_user_creds(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_user_creds), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = user_creds.UserCreds( - name="name_value", - state=user_creds.UserCreds.State.ENABLED, - secure_password="secure_password_value", - ) - response = client.get_user_creds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.GetUserCredsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, user_creds.UserCreds) - assert response.name == "name_value" - assert response.state == user_creds.UserCreds.State.ENABLED - assert response.secure_password == "secure_password_value" - - -def test_get_user_creds_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.GetUserCredsRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_user_creds), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_user_creds(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetUserCredsRequest( - name="name_value", - ) - - -def test_get_user_creds_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_user_creds in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_user_creds] = mock_rpc - request = {} - client.get_user_creds(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_user_creds(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_get_user_creds_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.get_user_creds - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_user_creds - ] = mock_rpc - - request = {} - await client.get_user_creds(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_user_creds(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_get_user_creds_async( - transport: str = "grpc_asyncio", request_type=firestore_admin.GetUserCredsRequest -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_user_creds), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - user_creds.UserCreds( - name="name_value", - state=user_creds.UserCreds.State.ENABLED, - secure_password="secure_password_value", - ) - ) - response = await client.get_user_creds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.GetUserCredsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, user_creds.UserCreds) - assert response.name == "name_value" - assert response.state == user_creds.UserCreds.State.ENABLED - assert response.secure_password == "secure_password_value" - - -@pytest.mark.asyncio -async def test_get_user_creds_async_from_dict(): - await test_get_user_creds_async(request_type=dict) - - -def test_get_user_creds_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.GetUserCredsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_user_creds), "__call__") as call: - call.return_value = user_creds.UserCreds() - client.get_user_creds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_user_creds_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.GetUserCredsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_user_creds), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - user_creds.UserCreds() - ) - await client.get_user_creds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_get_user_creds_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_user_creds), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = user_creds.UserCreds() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_user_creds( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_get_user_creds_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_user_creds( - firestore_admin.GetUserCredsRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_get_user_creds_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_user_creds), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = user_creds.UserCreds() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - user_creds.UserCreds() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_user_creds( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_get_user_creds_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_user_creds( - firestore_admin.GetUserCredsRequest(), - name="name_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.ListUserCredsRequest, - dict, - ], -) -def test_list_user_creds(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_user_creds), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = firestore_admin.ListUserCredsResponse() - response = client.list_user_creds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.ListUserCredsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore_admin.ListUserCredsResponse) - - -def test_list_user_creds_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.ListUserCredsRequest( - parent="parent_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_user_creds), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_user_creds(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListUserCredsRequest( - parent="parent_value", - ) - - -def test_list_user_creds_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_user_creds in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.list_user_creds] = mock_rpc - request = {} - client.list_user_creds(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_user_creds(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_list_user_creds_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.list_user_creds - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_user_creds - ] = mock_rpc - - request = {} - await client.list_user_creds(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_user_creds(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_list_user_creds_async( - transport: str = "grpc_asyncio", request_type=firestore_admin.ListUserCredsRequest -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_user_creds), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore_admin.ListUserCredsResponse() - ) - response = await client.list_user_creds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.ListUserCredsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore_admin.ListUserCredsResponse) - - -@pytest.mark.asyncio -async def test_list_user_creds_async_from_dict(): - await test_list_user_creds_async(request_type=dict) - - -def test_list_user_creds_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.ListUserCredsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_user_creds), "__call__") as call: - call.return_value = firestore_admin.ListUserCredsResponse() - client.list_user_creds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_user_creds_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = firestore_admin.ListUserCredsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_user_creds), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore_admin.ListUserCredsResponse() - ) - await client.list_user_creds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -def test_list_user_creds_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_user_creds), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = firestore_admin.ListUserCredsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_user_creds( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -def test_list_user_creds_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_user_creds( - firestore_admin.ListUserCredsRequest(), - parent="parent_value", - ) - - -@pytest.mark.asyncio -async def test_list_user_creds_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_user_creds), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = firestore_admin.ListUserCredsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore_admin.ListUserCredsResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_user_creds( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_list_user_creds_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_user_creds( - firestore_admin.ListUserCredsRequest(), - parent="parent_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.EnableUserCredsRequest, - dict, - ], -) -def test_enable_user_creds(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.enable_user_creds), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = user_creds.UserCreds( - name="name_value", - state=user_creds.UserCreds.State.ENABLED, - secure_password="secure_password_value", - ) - response = client.enable_user_creds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.EnableUserCredsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, user_creds.UserCreds) - assert response.name == "name_value" - assert response.state == user_creds.UserCreds.State.ENABLED - assert response.secure_password == "secure_password_value" - - -def test_enable_user_creds_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.EnableUserCredsRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.enable_user_creds), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.enable_user_creds(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.EnableUserCredsRequest( - name="name_value", - ) - - -def test_enable_user_creds_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.enable_user_creds in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.enable_user_creds - ] = mock_rpc - request = {} - client.enable_user_creds(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.enable_user_creds(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_enable_user_creds_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.enable_user_creds - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.enable_user_creds - ] = mock_rpc - - request = {} - await client.enable_user_creds(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.enable_user_creds(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_enable_user_creds_async( - transport: str = "grpc_asyncio", request_type=firestore_admin.EnableUserCredsRequest -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.enable_user_creds), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - user_creds.UserCreds( - name="name_value", - state=user_creds.UserCreds.State.ENABLED, - secure_password="secure_password_value", - ) - ) - response = await client.enable_user_creds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.EnableUserCredsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, user_creds.UserCreds) - assert response.name == "name_value" - assert response.state == user_creds.UserCreds.State.ENABLED - assert response.secure_password == "secure_password_value" - - -@pytest.mark.asyncio -async def test_enable_user_creds_async_from_dict(): - await test_enable_user_creds_async(request_type=dict) - - -def test_enable_user_creds_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.EnableUserCredsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.enable_user_creds), "__call__" - ) as call: - call.return_value = user_creds.UserCreds() - client.enable_user_creds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_enable_user_creds_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.EnableUserCredsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.enable_user_creds), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - user_creds.UserCreds() - ) - await client.enable_user_creds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_enable_user_creds_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.enable_user_creds), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = user_creds.UserCreds() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.enable_user_creds( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_enable_user_creds_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.enable_user_creds( - firestore_admin.EnableUserCredsRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_enable_user_creds_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.enable_user_creds), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = user_creds.UserCreds() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - user_creds.UserCreds() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.enable_user_creds( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_enable_user_creds_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.enable_user_creds( - firestore_admin.EnableUserCredsRequest(), - name="name_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.DisableUserCredsRequest, - dict, - ], -) -def test_disable_user_creds(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.disable_user_creds), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = user_creds.UserCreds( - name="name_value", - state=user_creds.UserCreds.State.ENABLED, - secure_password="secure_password_value", - ) - response = client.disable_user_creds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.DisableUserCredsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, user_creds.UserCreds) - assert response.name == "name_value" - assert response.state == user_creds.UserCreds.State.ENABLED - assert response.secure_password == "secure_password_value" - - -def test_disable_user_creds_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.DisableUserCredsRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.disable_user_creds), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.disable_user_creds(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.DisableUserCredsRequest( - name="name_value", - ) - - -def test_disable_user_creds_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.disable_user_creds in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.disable_user_creds - ] = mock_rpc - request = {} - client.disable_user_creds(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.disable_user_creds(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_disable_user_creds_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.disable_user_creds - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.disable_user_creds - ] = mock_rpc - - request = {} - await client.disable_user_creds(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.disable_user_creds(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_disable_user_creds_async( - transport: str = "grpc_asyncio", - request_type=firestore_admin.DisableUserCredsRequest, -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.disable_user_creds), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - user_creds.UserCreds( - name="name_value", - state=user_creds.UserCreds.State.ENABLED, - secure_password="secure_password_value", - ) - ) - response = await client.disable_user_creds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.DisableUserCredsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, user_creds.UserCreds) - assert response.name == "name_value" - assert response.state == user_creds.UserCreds.State.ENABLED - assert response.secure_password == "secure_password_value" - - -@pytest.mark.asyncio -async def test_disable_user_creds_async_from_dict(): - await test_disable_user_creds_async(request_type=dict) - - -def test_disable_user_creds_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.DisableUserCredsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.disable_user_creds), "__call__" - ) as call: - call.return_value = user_creds.UserCreds() - client.disable_user_creds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_disable_user_creds_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.DisableUserCredsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.disable_user_creds), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - user_creds.UserCreds() - ) - await client.disable_user_creds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_disable_user_creds_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.disable_user_creds), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = user_creds.UserCreds() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.disable_user_creds( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_disable_user_creds_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.disable_user_creds( - firestore_admin.DisableUserCredsRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_disable_user_creds_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.disable_user_creds), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = user_creds.UserCreds() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - user_creds.UserCreds() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.disable_user_creds( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_disable_user_creds_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.disable_user_creds( - firestore_admin.DisableUserCredsRequest(), - name="name_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.ResetUserPasswordRequest, - dict, - ], -) -def test_reset_user_password(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reset_user_password), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = user_creds.UserCreds( - name="name_value", - state=user_creds.UserCreds.State.ENABLED, - secure_password="secure_password_value", - ) - response = client.reset_user_password(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.ResetUserPasswordRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, user_creds.UserCreds) - assert response.name == "name_value" - assert response.state == user_creds.UserCreds.State.ENABLED - assert response.secure_password == "secure_password_value" - - -def test_reset_user_password_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.ResetUserPasswordRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reset_user_password), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.reset_user_password(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ResetUserPasswordRequest( - name="name_value", - ) - - -def test_reset_user_password_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.reset_user_password in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.reset_user_password - ] = mock_rpc - request = {} - client.reset_user_password(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.reset_user_password(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_reset_user_password_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.reset_user_password - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.reset_user_password - ] = mock_rpc - - request = {} - await client.reset_user_password(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.reset_user_password(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_reset_user_password_async( - transport: str = "grpc_asyncio", - request_type=firestore_admin.ResetUserPasswordRequest, -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reset_user_password), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - user_creds.UserCreds( - name="name_value", - state=user_creds.UserCreds.State.ENABLED, - secure_password="secure_password_value", - ) - ) - response = await client.reset_user_password(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.ResetUserPasswordRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, user_creds.UserCreds) - assert response.name == "name_value" - assert response.state == user_creds.UserCreds.State.ENABLED - assert response.secure_password == "secure_password_value" - - -@pytest.mark.asyncio -async def test_reset_user_password_async_from_dict(): - await test_reset_user_password_async(request_type=dict) - - -def test_reset_user_password_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.ResetUserPasswordRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reset_user_password), "__call__" - ) as call: - call.return_value = user_creds.UserCreds() - client.reset_user_password(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_reset_user_password_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.ResetUserPasswordRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reset_user_password), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - user_creds.UserCreds() - ) - await client.reset_user_password(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_reset_user_password_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reset_user_password), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = user_creds.UserCreds() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.reset_user_password( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_reset_user_password_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.reset_user_password( - firestore_admin.ResetUserPasswordRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_reset_user_password_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reset_user_password), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = user_creds.UserCreds() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - user_creds.UserCreds() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.reset_user_password( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_reset_user_password_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.reset_user_password( - firestore_admin.ResetUserPasswordRequest(), - name="name_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.DeleteUserCredsRequest, - dict, - ], -) -def test_delete_user_creds(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_user_creds), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_user_creds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.DeleteUserCredsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_user_creds_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.DeleteUserCredsRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_user_creds), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_user_creds(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.DeleteUserCredsRequest( - name="name_value", - ) - - -def test_delete_user_creds_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_user_creds in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.delete_user_creds - ] = mock_rpc - request = {} - client.delete_user_creds(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_user_creds(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_delete_user_creds_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.delete_user_creds - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_user_creds - ] = mock_rpc - - request = {} - await client.delete_user_creds(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_user_creds(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_delete_user_creds_async( - transport: str = "grpc_asyncio", request_type=firestore_admin.DeleteUserCredsRequest -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_user_creds), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_user_creds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.DeleteUserCredsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_user_creds_async_from_dict(): - await test_delete_user_creds_async(request_type=dict) - - -def test_delete_user_creds_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.DeleteUserCredsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_user_creds), "__call__" - ) as call: - call.return_value = None - client.delete_user_creds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_delete_user_creds_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = firestore_admin.DeleteUserCredsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_user_creds), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_user_creds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_delete_user_creds_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_user_creds), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_user_creds( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_delete_user_creds_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_user_creds( - firestore_admin.DeleteUserCredsRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_delete_user_creds_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_user_creds), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_user_creds( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_delete_user_creds_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_user_creds( - firestore_admin.DeleteUserCredsRequest(), - name="name_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.GetBackupRequest, - dict, - ], -) -def test_get_backup(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_backup), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = backup.Backup( - name="name_value", - database="database_value", - database_uid="database_uid_value", - state=backup.Backup.State.CREATING, - ) - response = client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.GetBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, backup.Backup) - assert response.name == "name_value" - assert response.database == "database_value" - assert response.database_uid == "database_uid_value" - assert response.state == backup.Backup.State.CREATING - - -def test_get_backup_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.GetBackupRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_backup(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetBackupRequest( - name="name_value", - ) - - -def test_get_backup_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc - request = {} - client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.get_backup - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_backup - ] = mock_rpc - - request = {} - await client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_get_backup_async( - transport: str = "grpc_asyncio", request_type=firestore_admin.GetBackupRequest -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_backup), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backup.Backup( - name="name_value", - database="database_value", - database_uid="database_uid_value", - state=backup.Backup.State.CREATING, - ) - ) - response = await client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.GetBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, backup.Backup) - assert response.name == "name_value" - assert response.database == "database_value" - assert response.database_uid == "database_uid_value" - assert response.state == backup.Backup.State.CREATING - - -@pytest.mark.asyncio -async def test_get_backup_async_from_dict(): - await test_get_backup_async(request_type=dict) - - -def test_get_backup_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.GetBackupRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: - call.return_value = backup.Backup() - client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_backup_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.GetBackupRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup()) - await client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_get_backup_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = backup.Backup() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_backup( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_get_backup_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_backup( - firestore_admin.GetBackupRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_get_backup_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = backup.Backup() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_backup( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_get_backup_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_backup( - firestore_admin.GetBackupRequest(), - name="name_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.ListBackupsRequest, - dict, - ], -) -def test_list_backups(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_backups), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = firestore_admin.ListBackupsResponse( - unreachable=["unreachable_value"], - ) - response = client.list_backups(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.ListBackupsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore_admin.ListBackupsResponse) - assert response.unreachable == ["unreachable_value"] - - -def test_list_backups_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = firestore_admin.ListBackupsRequest( - parent="parent_value", - filter="filter_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_backups), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_backups(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListBackupsRequest( - parent="parent_value", - filter="filter_value", - ) - - -def test_list_backups_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_backups in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc - request = {} - client.list_backups(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_backups(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_list_backups_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.list_backups - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_backups - ] = mock_rpc - - request = {} - await client.list_backups(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_backups(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_list_backups_async( - transport: str = "grpc_asyncio", request_type=firestore_admin.ListBackupsRequest -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_backups), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore_admin.ListBackupsResponse( - unreachable=["unreachable_value"], - ) - ) - response = await client.list_backups(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.ListBackupsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore_admin.ListBackupsResponse) - assert response.unreachable == ["unreachable_value"] - - -@pytest.mark.asyncio -async def test_list_backups_async_from_dict(): - await test_list_backups_async(request_type=dict) - - -def test_list_backups_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.ListBackupsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_backups), "__call__") as call: - call.return_value = firestore_admin.ListBackupsResponse() - client.list_backups(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_backups_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.ListBackupsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_backups), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore_admin.ListBackupsResponse() - ) - await client.list_backups(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -def test_list_backups_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_backups), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = firestore_admin.ListBackupsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_backups( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -def test_list_backups_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_backups( - firestore_admin.ListBackupsRequest(), - parent="parent_value", - ) - - -@pytest.mark.asyncio -async def test_list_backups_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_backups), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = firestore_admin.ListBackupsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore_admin.ListBackupsResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_backups( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_list_backups_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_backups( - firestore_admin.ListBackupsRequest(), - parent="parent_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.DeleteBackupRequest, - dict, - ], -) -def test_delete_backup(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.DeleteBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_backup_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.DeleteBackupRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_backup(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.DeleteBackupRequest( - name="name_value", - ) - - -def test_delete_backup_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc - request = {} - client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_delete_backup_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.delete_backup - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_backup - ] = mock_rpc - - request = {} - await client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_delete_backup_async( - transport: str = "grpc_asyncio", request_type=firestore_admin.DeleteBackupRequest -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.DeleteBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_backup_async_from_dict(): - await test_delete_backup_async(request_type=dict) - - -def test_delete_backup_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.DeleteBackupRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: - call.return_value = None - client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_delete_backup_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.DeleteBackupRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_delete_backup_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_backup( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_delete_backup_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_backup( - firestore_admin.DeleteBackupRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_delete_backup_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_backup( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_delete_backup_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_backup( - firestore_admin.DeleteBackupRequest(), - name="name_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.RestoreDatabaseRequest, - dict, - ], -) -def test_restore_database(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.restore_database), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.restore_database(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.RestoreDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_restore_database_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.RestoreDatabaseRequest( - parent="parent_value", - database_id="database_id_value", - backup="backup_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.restore_database), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.restore_database(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.RestoreDatabaseRequest( - parent="parent_value", - database_id="database_id_value", - backup="backup_value", - ) - - -def test_restore_database_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.restore_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.restore_database - ] = mock_rpc - request = {} - client.restore_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.restore_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_restore_database_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.restore_database - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.restore_database - ] = mock_rpc - - request = {} - await client.restore_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.restore_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_restore_database_async( - transport: str = "grpc_asyncio", request_type=firestore_admin.RestoreDatabaseRequest -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.restore_database), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.restore_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.RestoreDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_restore_database_async_from_dict(): - await test_restore_database_async(request_type=dict) - - -def test_restore_database_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.RestoreDatabaseRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.restore_database), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.restore_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_restore_database_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.RestoreDatabaseRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.restore_database), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.restore_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.CreateBackupScheduleRequest, - dict, - ], -) -def test_create_backup_schedule(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = schedule.BackupSchedule( - name="name_value", - ) - response = client.create_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.CreateBackupScheduleRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, schedule.BackupSchedule) - assert response.name == "name_value" - - -def test_create_backup_schedule_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.CreateBackupScheduleRequest( - parent="parent_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.create_backup_schedule(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.CreateBackupScheduleRequest( - parent="parent_value", - ) - - -def test_create_backup_schedule_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.create_backup_schedule - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.create_backup_schedule - ] = mock_rpc - request = {} - client.create_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_create_backup_schedule_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.create_backup_schedule - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_backup_schedule - ] = mock_rpc - - request = {} - await client.create_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_create_backup_schedule_async( - transport: str = "grpc_asyncio", - request_type=firestore_admin.CreateBackupScheduleRequest, -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - schedule.BackupSchedule( - name="name_value", - ) - ) - response = await client.create_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.CreateBackupScheduleRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, schedule.BackupSchedule) - assert response.name == "name_value" - - -@pytest.mark.asyncio -async def test_create_backup_schedule_async_from_dict(): - await test_create_backup_schedule_async(request_type=dict) - - -def test_create_backup_schedule_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.CreateBackupScheduleRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), "__call__" - ) as call: - call.return_value = schedule.BackupSchedule() - client.create_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_create_backup_schedule_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.CreateBackupScheduleRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - schedule.BackupSchedule() - ) - await client.create_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -def test_create_backup_schedule_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = schedule.BackupSchedule() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_backup_schedule( - parent="parent_value", - backup_schedule=schedule.BackupSchedule(name="name_value"), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].backup_schedule - mock_val = schedule.BackupSchedule(name="name_value") - assert arg == mock_val - - -def test_create_backup_schedule_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_backup_schedule( - firestore_admin.CreateBackupScheduleRequest(), - parent="parent_value", - backup_schedule=schedule.BackupSchedule(name="name_value"), - ) - - -@pytest.mark.asyncio -async def test_create_backup_schedule_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = schedule.BackupSchedule() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - schedule.BackupSchedule() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_backup_schedule( - parent="parent_value", - backup_schedule=schedule.BackupSchedule(name="name_value"), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].backup_schedule - mock_val = schedule.BackupSchedule(name="name_value") - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_backup_schedule_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_backup_schedule( - firestore_admin.CreateBackupScheduleRequest(), - parent="parent_value", - backup_schedule=schedule.BackupSchedule(name="name_value"), - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.GetBackupScheduleRequest, - dict, - ], -) -def test_get_backup_schedule(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_schedule), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = schedule.BackupSchedule( - name="name_value", - ) - response = client.get_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.GetBackupScheduleRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, schedule.BackupSchedule) - assert response.name == "name_value" - - -def test_get_backup_schedule_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.GetBackupScheduleRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_schedule), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_backup_schedule(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetBackupScheduleRequest( - name="name_value", - ) - - -def test_get_backup_schedule_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.get_backup_schedule in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client._transport._wrapped_methods[ - client._transport.get_backup_schedule - ] = mock_rpc - request = {} - client.get_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_get_backup_schedule_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.get_backup_schedule - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_backup_schedule - ] = mock_rpc - - request = {} - await client.get_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.get_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_get_backup_schedule_async( - transport: str = "grpc_asyncio", - request_type=firestore_admin.GetBackupScheduleRequest, -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_schedule), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - schedule.BackupSchedule( - name="name_value", - ) - ) - response = await client.get_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.GetBackupScheduleRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, schedule.BackupSchedule) - assert response.name == "name_value" - - -@pytest.mark.asyncio -async def test_get_backup_schedule_async_from_dict(): - await test_get_backup_schedule_async(request_type=dict) - - -def test_get_backup_schedule_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.GetBackupScheduleRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_backup_schedule), "__call__" - ) as call: - call.return_value = schedule.BackupSchedule() - client.get_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_backup_schedule_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.GetBackupScheduleRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_schedule), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - schedule.BackupSchedule() - ) - await client.get_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_get_backup_schedule_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_schedule), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = schedule.BackupSchedule() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_backup_schedule( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_get_backup_schedule_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_backup_schedule( - firestore_admin.GetBackupScheduleRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_get_backup_schedule_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_schedule), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = schedule.BackupSchedule() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - schedule.BackupSchedule() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_backup_schedule( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_get_backup_schedule_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_backup_schedule( - firestore_admin.GetBackupScheduleRequest(), - name="name_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.ListBackupSchedulesRequest, - dict, - ], -) -def test_list_backup_schedules(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = firestore_admin.ListBackupSchedulesResponse() - response = client.list_backup_schedules(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.ListBackupSchedulesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore_admin.ListBackupSchedulesResponse) - - -def test_list_backup_schedules_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.ListBackupSchedulesRequest( - parent="parent_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_backup_schedules(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListBackupSchedulesRequest( - parent="parent_value", - ) - - -def test_list_backup_schedules_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.list_backup_schedules - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.list_backup_schedules - ] = mock_rpc - request = {} - client.list_backup_schedules(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_backup_schedules(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_list_backup_schedules_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.list_backup_schedules - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_backup_schedules - ] = mock_rpc - - request = {} - await client.list_backup_schedules(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_backup_schedules(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_list_backup_schedules_async( - transport: str = "grpc_asyncio", - request_type=firestore_admin.ListBackupSchedulesRequest, -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore_admin.ListBackupSchedulesResponse() - ) - response = await client.list_backup_schedules(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.ListBackupSchedulesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore_admin.ListBackupSchedulesResponse) - - -@pytest.mark.asyncio -async def test_list_backup_schedules_async_from_dict(): - await test_list_backup_schedules_async(request_type=dict) - - -def test_list_backup_schedules_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.ListBackupSchedulesRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), "__call__" - ) as call: - call.return_value = firestore_admin.ListBackupSchedulesResponse() - client.list_backup_schedules(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_backup_schedules_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.ListBackupSchedulesRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore_admin.ListBackupSchedulesResponse() - ) - await client.list_backup_schedules(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -def test_list_backup_schedules_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = firestore_admin.ListBackupSchedulesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_backup_schedules( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -def test_list_backup_schedules_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_backup_schedules( - firestore_admin.ListBackupSchedulesRequest(), - parent="parent_value", - ) - - -@pytest.mark.asyncio -async def test_list_backup_schedules_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = firestore_admin.ListBackupSchedulesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore_admin.ListBackupSchedulesResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_backup_schedules( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_list_backup_schedules_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_backup_schedules( - firestore_admin.ListBackupSchedulesRequest(), - parent="parent_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.UpdateBackupScheduleRequest, - dict, - ], -) -def test_update_backup_schedule(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_schedule), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = schedule.BackupSchedule( - name="name_value", - ) - response = client.update_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.UpdateBackupScheduleRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, schedule.BackupSchedule) - assert response.name == "name_value" - - -def test_update_backup_schedule_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = firestore_admin.UpdateBackupScheduleRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_schedule), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_backup_schedule(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.UpdateBackupScheduleRequest() - - -def test_update_backup_schedule_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.update_backup_schedule - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.update_backup_schedule - ] = mock_rpc - request = {} - client.update_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.update_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_update_backup_schedule_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.update_backup_schedule - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.update_backup_schedule - ] = mock_rpc - - request = {} - await client.update_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.update_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_update_backup_schedule_async( - transport: str = "grpc_asyncio", - request_type=firestore_admin.UpdateBackupScheduleRequest, -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_schedule), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - schedule.BackupSchedule( - name="name_value", - ) - ) - response = await client.update_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.UpdateBackupScheduleRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, schedule.BackupSchedule) - assert response.name == "name_value" - - -@pytest.mark.asyncio -async def test_update_backup_schedule_async_from_dict(): - await test_update_backup_schedule_async(request_type=dict) - - -def test_update_backup_schedule_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.UpdateBackupScheduleRequest() - - request.backup_schedule.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_schedule), "__call__" - ) as call: - call.return_value = schedule.BackupSchedule() - client.update_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "backup_schedule.name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_update_backup_schedule_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.UpdateBackupScheduleRequest() - - request.backup_schedule.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_schedule), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - schedule.BackupSchedule() - ) - await client.update_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "backup_schedule.name=name_value", - ) in kw["metadata"] - - -def test_update_backup_schedule_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_schedule), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = schedule.BackupSchedule() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_backup_schedule( - backup_schedule=schedule.BackupSchedule(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].backup_schedule - mock_val = schedule.BackupSchedule(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -def test_update_backup_schedule_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_backup_schedule( - firestore_admin.UpdateBackupScheduleRequest(), - backup_schedule=schedule.BackupSchedule(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -@pytest.mark.asyncio -async def test_update_backup_schedule_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_schedule), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = schedule.BackupSchedule() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - schedule.BackupSchedule() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_backup_schedule( - backup_schedule=schedule.BackupSchedule(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].backup_schedule - mock_val = schedule.BackupSchedule(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_backup_schedule_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_backup_schedule( - firestore_admin.UpdateBackupScheduleRequest(), - backup_schedule=schedule.BackupSchedule(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.DeleteBackupScheduleRequest, - dict, - ], -) -def test_delete_backup_schedule(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_schedule), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.DeleteBackupScheduleRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_backup_schedule_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.DeleteBackupScheduleRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_schedule), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_backup_schedule(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.DeleteBackupScheduleRequest( - name="name_value", - ) - - -def test_delete_backup_schedule_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.delete_backup_schedule - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client._transport._wrapped_methods[ - client._transport.delete_backup_schedule - ] = mock_rpc - request = {} - client.delete_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_delete_backup_schedule_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.delete_backup_schedule - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_backup_schedule - ] = mock_rpc - - request = {} - await client.delete_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.delete_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_delete_backup_schedule_async( - transport: str = "grpc_asyncio", - request_type=firestore_admin.DeleteBackupScheduleRequest, -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_schedule), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.DeleteBackupScheduleRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_backup_schedule_async_from_dict(): - await test_delete_backup_schedule_async(request_type=dict) - - -def test_delete_backup_schedule_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.DeleteBackupScheduleRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_backup_schedule), "__call__" - ) as call: - call.return_value = None - client.delete_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_delete_backup_schedule_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore_admin.DeleteBackupScheduleRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_schedule), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_delete_backup_schedule_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_schedule), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete_backup_schedule( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_delete_backup_schedule_flattened_error(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_backup_schedule( - firestore_admin.DeleteBackupScheduleRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_delete_backup_schedule_flattened_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_schedule), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_backup_schedule( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_delete_backup_schedule_flattened_error_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_backup_schedule( - firestore_admin.DeleteBackupScheduleRequest(), - name="name_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.CloneDatabaseRequest, - dict, - ], -) -def test_clone_database(request_type, transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.clone_database), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.clone_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore_admin.CloneDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_clone_database_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore_admin.CloneDatabaseRequest( - parent="parent_value", - database_id="database_id_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.clone_database), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.clone_database(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.CloneDatabaseRequest( - parent="parent_value", - database_id="database_id_value", - ) - - -def test_clone_database_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.clone_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.clone_database] = mock_rpc - request = {} - client.clone_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.clone_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_clone_database_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.clone_database - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.clone_database - ] = mock_rpc - - request = {} - await client.clone_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.clone_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_clone_database_async( - transport: str = "grpc_asyncio", request_type=firestore_admin.CloneDatabaseRequest -): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.clone_database), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.clone_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore_admin.CloneDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_clone_database_async_from_dict(): - await test_clone_database_async(request_type=dict) - - -def test_create_index_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_index in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.create_index] = mock_rpc - - request = {} - client.create_index(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_index(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_index_rest_required_fields( - request_type=firestore_admin.CreateIndexRequest, -): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_index._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_index._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_index(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_index_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_index._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "index", - ) - ) - ) - - -def test_create_index_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/databases/sample2/collectionGroups/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - index=gfa_index.Index(name="name_value"), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_index(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes" - % client.transport._host, - args[1], - ) - - -def test_create_index_rest_flattened_error(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_index( - firestore_admin.CreateIndexRequest(), - parent="parent_value", - index=gfa_index.Index(name="name_value"), - ) - - -def test_list_indexes_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_indexes in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.list_indexes] = mock_rpc - - request = {} - client.list_indexes(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_indexes(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_indexes_rest_required_fields( - request_type=firestore_admin.ListIndexesRequest, -): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_indexes._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_indexes._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = firestore_admin.ListIndexesResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore_admin.ListIndexesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_indexes(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_indexes_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_indexes._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -def test_list_indexes_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = firestore_admin.ListIndexesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/databases/sample2/collectionGroups/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore_admin.ListIndexesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_indexes(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes" - % client.transport._host, - args[1], - ) - - -def test_list_indexes_rest_flattened_error(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_indexes( - firestore_admin.ListIndexesRequest(), - parent="parent_value", - ) - - -def test_list_indexes_rest_pager(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - firestore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - index.Index(), - index.Index(), - ], - next_page_token="abc", - ), - firestore_admin.ListIndexesResponse( - indexes=[], - next_page_token="def", - ), - firestore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - ], - next_page_token="ghi", - ), - firestore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - index.Index(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - firestore_admin.ListIndexesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/databases/sample2/collectionGroups/sample3" - } - - pager = client.list_indexes(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, index.Index) for i in results) - - pages = list(client.list_indexes(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_get_index_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_index in 
client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_index] = mock_rpc - - request = {} - client.get_index(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_index(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_index_rest_required_fields(request_type=firestore_admin.GetIndexRequest): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_index._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_index._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = index.Index() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = index.Index.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_index(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_index_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_index._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -def test_get_index_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = index.Index() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/databases/sample2/collectionGroups/sample3/indexes/sample4" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = index.Index.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_index(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}" - % client.transport._host, - args[1], - ) - - -def test_get_index_rest_flattened_error(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_index( - firestore_admin.GetIndexRequest(), - name="name_value", - ) - - -def test_delete_index_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_index in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.delete_index] = mock_rpc - - request = {} - client.delete_index(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_index(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_index_rest_required_fields( - request_type=firestore_admin.DeleteIndexRequest, -): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_index._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_index._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_index(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_index_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.delete_index._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -def test_delete_index_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/databases/sample2/collectionGroups/sample3/indexes/sample4" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_index(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_index_rest_flattened_error(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_index( - firestore_admin.DeleteIndexRequest(), - name="name_value", - ) - - -def test_get_field_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_field in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_field] = mock_rpc - - request = {} - client.get_field(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_field(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_field_rest_required_fields(request_type=firestore_admin.GetFieldRequest): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_field._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_field._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = field.Field() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = field.Field.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_field(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_field_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_field._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -def test_get_field_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = field.Field() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/databases/sample2/collectionGroups/sample3/fields/sample4" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = field.Field.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_field(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/databases/*/collectionGroups/*/fields/*}" - % client.transport._host, - args[1], - ) - - -def test_get_field_rest_flattened_error(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_field( - firestore_admin.GetFieldRequest(), - name="name_value", - ) - - -def test_update_field_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_field in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.update_field] = mock_rpc - - request = {} - client.update_field(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_field(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_field_rest_required_fields( - request_type=firestore_admin.UpdateFieldRequest, -): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_field._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_field._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_field(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_field_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_field._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("field",))) - - -def test_update_field_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "field": { - "name": "projects/sample1/databases/sample2/collectionGroups/sample3/fields/sample4" - } - } - - # get truthy value for each flattened field - mock_args = dict( - field=gfa_field.Field(name="name_value"), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_field(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{field.name=projects/*/databases/*/collectionGroups/*/fields/*}" - % client.transport._host, - args[1], - ) - - -def test_update_field_rest_flattened_error(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_field( - firestore_admin.UpdateFieldRequest(), - field=gfa_field.Field(name="name_value"), - ) - - -def test_list_fields_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_fields in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.list_fields] = mock_rpc - - request = {} - client.list_fields(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_fields(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_fields_rest_required_fields( - request_type=firestore_admin.ListFieldsRequest, -): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_fields._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_fields._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = firestore_admin.ListFieldsResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore_admin.ListFieldsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_fields(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_fields_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_fields._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -def test_list_fields_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = firestore_admin.ListFieldsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/databases/sample2/collectionGroups/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore_admin.ListFieldsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_fields(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/databases/*/collectionGroups/*}/fields" - % client.transport._host, - args[1], - ) - - -def test_list_fields_rest_flattened_error(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_fields( - firestore_admin.ListFieldsRequest(), - parent="parent_value", - ) - - -def test_list_fields_rest_pager(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - firestore_admin.ListFieldsResponse( - fields=[ - field.Field(), - field.Field(), - field.Field(), - ], - next_page_token="abc", - ), - firestore_admin.ListFieldsResponse( - fields=[], - next_page_token="def", - ), - firestore_admin.ListFieldsResponse( - fields=[ - field.Field(), - ], - next_page_token="ghi", - ), - firestore_admin.ListFieldsResponse( - fields=[ - field.Field(), - field.Field(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - firestore_admin.ListFieldsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/databases/sample2/collectionGroups/sample3" - } - - pager = client.list_fields(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, field.Field) for i in results) - - pages = list(client.list_fields(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_export_documents_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.export_documents in 
client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.export_documents - ] = mock_rpc - - request = {} - client.export_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.export_documents(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_export_documents_rest_required_fields( - request_type=firestore_admin.ExportDocumentsRequest, -): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).export_documents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).export_documents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - 
transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.export_documents(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_export_documents_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.export_documents._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -def test_export_documents_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/databases/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.export_documents(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/databases/*}:exportDocuments" - % client.transport._host, - args[1], - ) - - -def test_export_documents_rest_flattened_error(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.export_documents( - firestore_admin.ExportDocumentsRequest(), - name="name_value", - ) - - -def test_import_documents_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.import_documents in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.import_documents - ] = mock_rpc - - request = {} - client.import_documents(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.import_documents(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_import_documents_rest_required_fields( - request_type=firestore_admin.ImportDocumentsRequest, -): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).import_documents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).import_documents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.import_documents(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_import_documents_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.import_documents._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -def test_import_documents_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/databases/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.import_documents(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/databases/*}:importDocuments" - % client.transport._host, - args[1], - ) - - -def test_import_documents_rest_flattened_error(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.import_documents( - firestore_admin.ImportDocumentsRequest(), - name="name_value", - ) - - -def test_bulk_delete_documents_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.bulk_delete_documents - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.bulk_delete_documents - ] = mock_rpc - - request = {} - client.bulk_delete_documents(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.bulk_delete_documents(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_bulk_delete_documents_rest_required_fields( - request_type=firestore_admin.BulkDeleteDocumentsRequest, -): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).bulk_delete_documents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).bulk_delete_documents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.bulk_delete_documents(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_bulk_delete_documents_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.bulk_delete_documents._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -def test_bulk_delete_documents_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/databases/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.bulk_delete_documents(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/databases/*}:bulkDeleteDocuments" - % client.transport._host, - args[1], - ) - - -def test_bulk_delete_documents_rest_flattened_error(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.bulk_delete_documents( - firestore_admin.BulkDeleteDocumentsRequest(), - name="name_value", - ) - - -def test_create_database_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.create_database] = mock_rpc - - request = {} - client.create_database(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_database_rest_required_fields( - request_type=firestore_admin.CreateDatabaseRequest, -): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["database_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - assert "databaseId" not in jsonified_request - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "databaseId" in jsonified_request - assert jsonified_request["databaseId"] == request_init["database_id"] - - jsonified_request["parent"] = "parent_value" - jsonified_request["databaseId"] = "database_id_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_database._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("database_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "databaseId" in jsonified_request - assert jsonified_request["databaseId"] == "database_id_value" - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_database(request) - - expected_params = [ - ( - "databaseId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_database_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_database._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("databaseId",)) - & set( - ( - "parent", - "database", - "databaseId", - ) - ) - ) - - -def test_create_database_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - database=gfa_database.Database(name="name_value"), - database_id="database_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_database(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*}/databases" % client.transport._host, args[1] - ) - - -def test_create_database_rest_flattened_error(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_database( - firestore_admin.CreateDatabaseRequest(), - parent="parent_value", - database=gfa_database.Database(name="name_value"), - database_id="database_id_value", - ) - - -def test_get_database_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_database] = mock_rpc - - request = {} - client.get_database(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_database_rest_required_fields( - request_type=firestore_admin.GetDatabaseRequest, -): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = database.Database() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = database.Database.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_database(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_database_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_database._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -def test_get_database_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = database.Database() - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/databases/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = database.Database.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_database(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/databases/*}" % client.transport._host, args[1] - ) - - -def test_get_database_rest_flattened_error(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_database( - firestore_admin.GetDatabaseRequest(), - name="name_value", - ) - - -def test_list_databases_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_databases in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.list_databases] = mock_rpc - - request = {} - client.list_databases(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_databases(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_databases_rest_required_fields( - request_type=firestore_admin.ListDatabasesRequest, -): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_databases._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_databases._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("show_deleted",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = firestore_admin.ListDatabasesResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore_admin.ListDatabasesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_databases(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_databases_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_databases._get_unset_required_fields({}) - assert set(unset_fields) == (set(("showDeleted",)) & set(("parent",))) - - -def test_list_databases_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = firestore_admin.ListDatabasesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore_admin.ListDatabasesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_databases(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*}/databases" % client.transport._host, args[1] - ) - - -def test_list_databases_rest_flattened_error(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_databases( - firestore_admin.ListDatabasesRequest(), - parent="parent_value", - ) - - -def test_update_database_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.update_database] = mock_rpc - - request = {} - client.update_database(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_database_rest_required_fields( - request_type=firestore_admin.UpdateDatabaseRequest, -): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_database._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_database(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_database_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_database._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("database",))) - - -def test_update_database_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = {"database": {"name": "projects/sample1/databases/sample2"}} - - # get truthy value for each flattened field - mock_args = dict( - database=gfa_database.Database(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_database(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{database.name=projects/*/databases/*}" % client.transport._host, - args[1], - ) - - -def test_update_database_rest_flattened_error(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_database( - firestore_admin.UpdateDatabaseRequest(), - database=gfa_database.Database(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -def test_delete_database_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.delete_database] = mock_rpc - - request = {} - client.delete_database(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_database_rest_required_fields( - request_type=firestore_admin.DeleteDatabaseRequest, -): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_database._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("etag",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_database(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_database_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.delete_database._get_unset_required_fields({}) - assert set(unset_fields) == (set(("etag",)) & set(("name",))) - - -def test_delete_database_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/databases/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_database(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/databases/*}" % client.transport._host, args[1] - ) - - -def test_delete_database_rest_flattened_error(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_database( - firestore_admin.DeleteDatabaseRequest(), - name="name_value", - ) - - -def test_create_user_creds_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_user_creds in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.create_user_creds - ] = mock_rpc - - request = {} - client.create_user_creds(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_user_creds(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_user_creds_rest_required_fields( - request_type=firestore_admin.CreateUserCredsRequest, -): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["user_creds_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - assert "userCredsId" not in jsonified_request - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_user_creds._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "userCredsId" in jsonified_request - assert jsonified_request["userCredsId"] == request_init["user_creds_id"] - - jsonified_request["parent"] = "parent_value" - jsonified_request["userCredsId"] = "user_creds_id_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_user_creds._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("user_creds_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "userCredsId" in jsonified_request - assert jsonified_request["userCredsId"] == "user_creds_id_value" - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = gfa_user_creds.UserCreds() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gfa_user_creds.UserCreds.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_user_creds(request) - - expected_params = [ - ( - "userCredsId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_user_creds_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_user_creds._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("userCredsId",)) - & set( - ( - "parent", - "userCreds", - "userCredsId", - ) - ) - ) - - -def test_create_user_creds_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = gfa_user_creds.UserCreds() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/databases/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - user_creds=gfa_user_creds.UserCreds(name="name_value"), - user_creds_id="user_creds_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gfa_user_creds.UserCreds.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_user_creds(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/databases/*}/userCreds" % client.transport._host, - args[1], - ) - - -def test_create_user_creds_rest_flattened_error(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_user_creds( - firestore_admin.CreateUserCredsRequest(), - parent="parent_value", - user_creds=gfa_user_creds.UserCreds(name="name_value"), - user_creds_id="user_creds_id_value", - ) - - -def test_get_user_creds_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_user_creds in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_user_creds] = mock_rpc - - request = {} - client.get_user_creds(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_user_creds(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_user_creds_rest_required_fields( - request_type=firestore_admin.GetUserCredsRequest, -): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_user_creds._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_user_creds._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = user_creds.UserCreds() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = user_creds.UserCreds.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_user_creds(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_user_creds_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_user_creds._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -def test_get_user_creds_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = user_creds.UserCreds() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/databases/sample2/userCreds/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = user_creds.UserCreds.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_user_creds(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/databases/*/userCreds/*}" % client.transport._host, - args[1], - ) - - -def test_get_user_creds_rest_flattened_error(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_user_creds( - firestore_admin.GetUserCredsRequest(), - name="name_value", - ) - - -def test_list_user_creds_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_user_creds in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.list_user_creds] = mock_rpc - - request = {} - client.list_user_creds(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_user_creds(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_user_creds_rest_required_fields( - request_type=firestore_admin.ListUserCredsRequest, -): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_user_creds._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_user_creds._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = firestore_admin.ListUserCredsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore_admin.ListUserCredsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_user_creds(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_user_creds_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_user_creds._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) - - -def test_list_user_creds_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = firestore_admin.ListUserCredsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/databases/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore_admin.ListUserCredsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_user_creds(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/databases/*}/userCreds" % client.transport._host, - args[1], - ) - - -def test_list_user_creds_rest_flattened_error(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_user_creds( - firestore_admin.ListUserCredsRequest(), - parent="parent_value", - ) - - -def test_enable_user_creds_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.enable_user_creds in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.enable_user_creds - ] = mock_rpc - - request = {} - client.enable_user_creds(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.enable_user_creds(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_enable_user_creds_rest_required_fields( - request_type=firestore_admin.EnableUserCredsRequest, -): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).enable_user_creds._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).enable_user_creds._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = user_creds.UserCreds() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = user_creds.UserCreds.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.enable_user_creds(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_enable_user_creds_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.enable_user_creds._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -def test_enable_user_creds_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = user_creds.UserCreds() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/databases/sample2/userCreds/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = user_creds.UserCreds.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.enable_user_creds(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/databases/*/userCreds/*}:enable" - % client.transport._host, - args[1], - ) - - -def test_enable_user_creds_rest_flattened_error(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.enable_user_creds( - firestore_admin.EnableUserCredsRequest(), - name="name_value", - ) - - -def test_disable_user_creds_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.disable_user_creds in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.disable_user_creds - ] = mock_rpc - - request = {} - client.disable_user_creds(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.disable_user_creds(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_disable_user_creds_rest_required_fields( - request_type=firestore_admin.DisableUserCredsRequest, -): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).disable_user_creds._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).disable_user_creds._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = user_creds.UserCreds() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = user_creds.UserCreds.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.disable_user_creds(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_disable_user_creds_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.disable_user_creds._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -def test_disable_user_creds_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = user_creds.UserCreds() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/databases/sample2/userCreds/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = user_creds.UserCreds.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.disable_user_creds(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/databases/*/userCreds/*}:disable" - % client.transport._host, - args[1], - ) - - -def test_disable_user_creds_rest_flattened_error(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.disable_user_creds( - firestore_admin.DisableUserCredsRequest(), - name="name_value", - ) - - -def test_reset_user_password_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.reset_user_password in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.reset_user_password - ] = mock_rpc - - request = {} - client.reset_user_password(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.reset_user_password(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_reset_user_password_rest_required_fields( - request_type=firestore_admin.ResetUserPasswordRequest, -): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).reset_user_password._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).reset_user_password._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = user_creds.UserCreds() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = user_creds.UserCreds.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.reset_user_password(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_reset_user_password_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.reset_user_password._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -def test_reset_user_password_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = user_creds.UserCreds() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/databases/sample2/userCreds/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = user_creds.UserCreds.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.reset_user_password(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/databases/*/userCreds/*}:resetPassword" - % client.transport._host, - args[1], - ) - - -def test_reset_user_password_rest_flattened_error(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.reset_user_password( - firestore_admin.ResetUserPasswordRequest(), - name="name_value", - ) - - -def test_delete_user_creds_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_user_creds in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.delete_user_creds - ] = mock_rpc - - request = {} - client.delete_user_creds(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_user_creds(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_user_creds_rest_required_fields( - request_type=firestore_admin.DeleteUserCredsRequest, -): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_user_creds._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_user_creds._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_user_creds(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_user_creds_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.delete_user_creds._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -def test_delete_user_creds_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/databases/sample2/userCreds/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_user_creds(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/databases/*/userCreds/*}" % client.transport._host, - args[1], - ) - - -def test_delete_user_creds_rest_flattened_error(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_user_creds( - firestore_admin.DeleteUserCredsRequest(), - name="name_value", - ) - - -def test_get_backup_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc - - request = {} - client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_backup_rest_required_fields(request_type=firestore_admin.GetBackupRequest): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_backup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_backup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = backup.Backup() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backup.Backup.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_backup(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_backup_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_backup._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -def test_get_backup_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = backup.Backup() - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/backups/sample3"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backup.Backup.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_backup(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/backups/*}" % client.transport._host, - args[1], - ) - - -def test_get_backup_rest_flattened_error(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_backup( - firestore_admin.GetBackupRequest(), - name="name_value", - ) - - -def test_list_backups_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_backups in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc - - request = {} - client.list_backups(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_backups(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_backups_rest_required_fields( - request_type=firestore_admin.ListBackupsRequest, -): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_backups._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_backups._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = firestore_admin.ListBackupsResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore_admin.ListBackupsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_backups(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_backups_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_backups._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter",)) & set(("parent",))) - - -def test_list_backups_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = firestore_admin.ListBackupsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore_admin.ListBackupsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_backups(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/backups" % client.transport._host, - args[1], - ) - - -def test_list_backups_rest_flattened_error(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_backups( - firestore_admin.ListBackupsRequest(), - parent="parent_value", - ) - - -def test_delete_backup_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc - - request = {} - client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_backup_rest_required_fields( - request_type=firestore_admin.DeleteBackupRequest, -): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_backup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_backup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_backup(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_backup_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.delete_backup._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -def test_delete_backup_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/backups/sample3"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_backup(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/backups/*}" % client.transport._host, - args[1], - ) - - -def test_delete_backup_rest_flattened_error(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_backup( - firestore_admin.DeleteBackupRequest(), - name="name_value", - ) - - -def test_restore_database_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.restore_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.restore_database - ] = mock_rpc - - request = {} - client.restore_database(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.restore_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_restore_database_rest_required_fields( - request_type=firestore_admin.RestoreDatabaseRequest, -): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["database_id"] = "" - request_init["backup"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).restore_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - jsonified_request["databaseId"] = "database_id_value" - jsonified_request["backup"] = "backup_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).restore_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "databaseId" in jsonified_request - assert jsonified_request["databaseId"] == "database_id_value" - assert "backup" in jsonified_request - assert jsonified_request["backup"] == "backup_value" - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = 
request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.restore_database(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_restore_database_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.restore_database._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "databaseId", - "backup", - ) - ) - ) - - -def test_create_backup_schedule_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - 
client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.create_backup_schedule - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.create_backup_schedule - ] = mock_rpc - - request = {} - client.create_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_backup_schedule_rest_required_fields( - request_type=firestore_admin.CreateBackupScheduleRequest, -): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_backup_schedule._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_backup_schedule._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with 
non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = schedule.BackupSchedule() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_backup_schedule(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_backup_schedule_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_backup_schedule._get_unset_required_fields({}) - assert set(unset_fields) == 
( - set(()) - & set( - ( - "parent", - "backupSchedule", - ) - ) - ) - - -def test_create_backup_schedule_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = schedule.BackupSchedule() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/databases/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - backup_schedule=schedule.BackupSchedule(name="name_value"), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_backup_schedule(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/databases/*}/backupSchedules" - % client.transport._host, - args[1], - ) - - -def test_create_backup_schedule_rest_flattened_error(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_backup_schedule( - firestore_admin.CreateBackupScheduleRequest(), - parent="parent_value", - backup_schedule=schedule.BackupSchedule(name="name_value"), - ) - - -def test_get_backup_schedule_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.get_backup_schedule in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.get_backup_schedule - ] = mock_rpc - - request = {} - client.get_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_backup_schedule_rest_required_fields( - request_type=firestore_admin.GetBackupScheduleRequest, -): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_backup_schedule._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_backup_schedule._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = schedule.BackupSchedule() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_backup_schedule(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_backup_schedule_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_backup_schedule._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -def test_get_backup_schedule_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = schedule.BackupSchedule() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/databases/sample2/backupSchedules/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_backup_schedule(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/databases/*/backupSchedules/*}" - % client.transport._host, - args[1], - ) - - -def test_get_backup_schedule_rest_flattened_error(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_backup_schedule( - firestore_admin.GetBackupScheduleRequest(), - name="name_value", - ) - - -def test_list_backup_schedules_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.list_backup_schedules - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.list_backup_schedules - ] = mock_rpc - - request = {} - client.list_backup_schedules(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_backup_schedules(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_backup_schedules_rest_required_fields( - request_type=firestore_admin.ListBackupSchedulesRequest, -): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_backup_schedules._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_backup_schedules._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = firestore_admin.ListBackupSchedulesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore_admin.ListBackupSchedulesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_backup_schedules(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_backup_schedules_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_backup_schedules._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) - - -def test_list_backup_schedules_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = firestore_admin.ListBackupSchedulesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/databases/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore_admin.ListBackupSchedulesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_backup_schedules(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/databases/*}/backupSchedules" - % client.transport._host, - args[1], - ) - - -def test_list_backup_schedules_rest_flattened_error(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_backup_schedules( - firestore_admin.ListBackupSchedulesRequest(), - parent="parent_value", - ) - - -def test_update_backup_schedule_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.update_backup_schedule - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.update_backup_schedule - ] = mock_rpc - - request = {} - client.update_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.update_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_backup_schedule_rest_required_fields( - request_type=firestore_admin.UpdateBackupScheduleRequest, -): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_backup_schedule._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_backup_schedule._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = schedule.BackupSchedule() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_backup_schedule(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_backup_schedule_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_backup_schedule._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("backupSchedule",))) - - -def test_update_backup_schedule_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = schedule.BackupSchedule() - - # get arguments that satisfy an http rule for this method - sample_request = { - "backup_schedule": { - "name": "projects/sample1/databases/sample2/backupSchedules/sample3" - } - } - - # get truthy value for each flattened field - mock_args = dict( - backup_schedule=schedule.BackupSchedule(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_backup_schedule(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{backup_schedule.name=projects/*/databases/*/backupSchedules/*}" - % client.transport._host, - args[1], - ) - - -def test_update_backup_schedule_rest_flattened_error(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_backup_schedule( - firestore_admin.UpdateBackupScheduleRequest(), - backup_schedule=schedule.BackupSchedule(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -def test_delete_backup_schedule_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.delete_backup_schedule - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.delete_backup_schedule - ] = mock_rpc - - request = {} - client.delete_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_backup_schedule_rest_required_fields( - request_type=firestore_admin.DeleteBackupScheduleRequest, -): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_backup_schedule._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_backup_schedule._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_backup_schedule(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_backup_schedule_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.delete_backup_schedule._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -def test_delete_backup_schedule_rest_flattened(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/databases/sample2/backupSchedules/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_backup_schedule(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/databases/*/backupSchedules/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_backup_schedule_rest_flattened_error(transport: str = "rest"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_backup_schedule( - firestore_admin.DeleteBackupScheduleRequest(), - name="name_value", - ) - - -def test_clone_database_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.clone_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.clone_database] = mock_rpc - - request = {} - client.clone_database(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.clone_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_clone_database_rest_required_fields( - request_type=firestore_admin.CloneDatabaseRequest, -): - transport_class = transports.FirestoreAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["database_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).clone_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - jsonified_request["databaseId"] = "database_id_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).clone_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "databaseId" in jsonified_request - assert jsonified_request["databaseId"] == "database_id_value" - - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.clone_database(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_clone_database_rest_unset_required_fields(): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.clone_database._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "databaseId", - "pitrSnapshot", - ) - ) - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. 
- transport = transports.FirestoreAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.FirestoreAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = FirestoreAdminClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.FirestoreAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = FirestoreAdminClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = FirestoreAdminClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.FirestoreAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = FirestoreAdminClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.FirestoreAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = FirestoreAdminClient(transport=transport) - assert client.transport is transport - - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.FirestoreAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.FirestoreAdminGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.FirestoreAdminGrpcTransport, - transports.FirestoreAdminGrpcAsyncIOTransport, - transports.FirestoreAdminRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - - -def test_transport_kind_grpc(): - transport = FirestoreAdminClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_index_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.create_index), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_index(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.CreateIndexRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -def test_list_indexes_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: - call.return_value = firestore_admin.ListIndexesResponse() - client.list_indexes(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.ListIndexesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_index_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_index), "__call__") as call: - call.return_value = index.Index() - client.get_index(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.GetIndexRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_index_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_index), "__call__") as call: - call.return_value = None - client.delete_index(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.DeleteIndexRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_field_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_field), "__call__") as call: - call.return_value = field.Field() - client.get_field(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.GetFieldRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_field_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.update_field), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_field(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.UpdateFieldRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_fields_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.list_fields), "__call__") as call: - call.return_value = firestore_admin.ListFieldsResponse() - client.list_fields(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.ListFieldsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_export_documents_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.export_documents), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.export_documents(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.ExportDocumentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_import_documents_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.import_documents), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.import_documents(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.ImportDocumentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -def test_bulk_delete_documents_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.bulk_delete_documents), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.bulk_delete_documents(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.BulkDeleteDocumentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_database_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.create_database), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.CreateDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_database_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_database), "__call__") as call: - call.return_value = database.Database() - client.get_database(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.GetDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_databases_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_databases), "__call__") as call: - call.return_value = firestore_admin.ListDatabasesResponse() - client.list_databases(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.ListDatabasesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_database_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.update_database), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.UpdateDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_database_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.delete_database), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.DeleteDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_user_creds_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_user_creds), "__call__" - ) as call: - call.return_value = gfa_user_creds.UserCreds() - client.create_user_creds(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.CreateUserCredsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_user_creds_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_user_creds), "__call__") as call: - call.return_value = user_creds.UserCreds() - client.get_user_creds(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.GetUserCredsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_list_user_creds_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_user_creds), "__call__") as call: - call.return_value = firestore_admin.ListUserCredsResponse() - client.list_user_creds(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.ListUserCredsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_enable_user_creds_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.enable_user_creds), "__call__" - ) as call: - call.return_value = user_creds.UserCreds() - client.enable_user_creds(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.EnableUserCredsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_disable_user_creds_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.disable_user_creds), "__call__" - ) as call: - call.return_value = user_creds.UserCreds() - client.disable_user_creds(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.DisableUserCredsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_reset_user_password_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.reset_user_password), "__call__" - ) as call: - call.return_value = user_creds.UserCreds() - client.reset_user_password(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.ResetUserPasswordRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_user_creds_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_user_creds), "__call__" - ) as call: - call.return_value = None - client.delete_user_creds(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.DeleteUserCredsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_backup_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.get_backup), "__call__") as call: - call.return_value = backup.Backup() - client.get_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.GetBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_backups_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_backups), "__call__") as call: - call.return_value = firestore_admin.ListBackupsResponse() - client.list_backups(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.ListBackupsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_backup_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: - call.return_value = None - client.delete_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.DeleteBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_restore_database_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.restore_database), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.restore_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.RestoreDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_backup_schedule_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), "__call__" - ) as call: - call.return_value = schedule.BackupSchedule() - client.create_backup_schedule(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.CreateBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_backup_schedule_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_schedule), "__call__" - ) as call: - call.return_value = schedule.BackupSchedule() - client.get_backup_schedule(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.GetBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_backup_schedules_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), "__call__" - ) as call: - call.return_value = firestore_admin.ListBackupSchedulesResponse() - client.list_backup_schedules(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.ListBackupSchedulesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_backup_schedule_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_schedule), "__call__" - ) as call: - call.return_value = schedule.BackupSchedule() - client.update_backup_schedule(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.UpdateBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_delete_backup_schedule_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_schedule), "__call__" - ) as call: - call.return_value = None - client.delete_backup_schedule(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.DeleteBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_clone_database_empty_call_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.clone_database), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.clone_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.CloneDatabaseRequest() - - assert args[0] == request_msg - - -def test_clone_database_routing_parameters_request_1_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.clone_database), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.clone_database( - request={"pitr_snapshot": {"database": "projects/sample1/sample2"}} - ) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, kw = call.mock_calls[0] - request_msg = firestore_admin.CloneDatabaseRequest( - **{"pitr_snapshot": {"database": "projects/sample1/sample2"}} - ) - - assert args[0] == request_msg - - expected_headers = {"project_id": "sample1"} - assert ( - gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] - ) - - -def test_clone_database_routing_parameters_request_2_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.clone_database), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.clone_database( - request={ - "pitr_snapshot": { - "database": "projects/sample1/databases/sample2/sample3" - } - } - ) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, kw = call.mock_calls[0] - request_msg = firestore_admin.CloneDatabaseRequest( - **{ - "pitr_snapshot": { - "database": "projects/sample1/databases/sample2/sample3" - } - } - ) - - assert args[0] == request_msg - - expected_headers = {"project_id": "sample1", "database_id": "sample2"} - assert ( - gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] - ) - - -def test_transport_kind_grpc_asyncio(): - transport = FirestoreAdminAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_create_index_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.create_index), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.create_index(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.CreateIndexRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_indexes_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore_admin.ListIndexesResponse( - next_page_token="next_page_token_value", - ) - ) - await client.list_indexes(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.ListIndexesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_get_index_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_index), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - index.Index( - name="name_value", - query_scope=index.Index.QueryScope.COLLECTION, - api_scope=index.Index.ApiScope.DATASTORE_MODE_API, - state=index.Index.State.CREATING, - density=index.Index.Density.SPARSE_ALL, - multikey=True, - shard_count=1178, - ) - ) - await client.get_index(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.GetIndexRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_index_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_index), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_index(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.DeleteIndexRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_get_field_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_field), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - field.Field( - name="name_value", - ) - ) - await client.get_field(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.GetFieldRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_field_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.update_field), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.update_field(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.UpdateFieldRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_fields_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.list_fields), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore_admin.ListFieldsResponse( - next_page_token="next_page_token_value", - ) - ) - await client.list_fields(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.ListFieldsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_export_documents_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.export_documents), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.export_documents(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.ExportDocumentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_import_documents_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.import_documents), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.import_documents(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.ImportDocumentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_bulk_delete_documents_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.bulk_delete_documents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.bulk_delete_documents(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.BulkDeleteDocumentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_database_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.create_database), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.create_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.CreateDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_database_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_database), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - database.Database( - name="name_value", - uid="uid_value", - location_id="location_id_value", - type_=database.Database.DatabaseType.FIRESTORE_NATIVE, - concurrency_mode=database.Database.ConcurrencyMode.OPTIMISTIC, - point_in_time_recovery_enablement=database.Database.PointInTimeRecoveryEnablement.POINT_IN_TIME_RECOVERY_ENABLED, - app_engine_integration_mode=database.Database.AppEngineIntegrationMode.ENABLED, - key_prefix="key_prefix_value", - delete_protection_state=database.Database.DeleteProtectionState.DELETE_PROTECTION_DISABLED, - previous_id="previous_id_value", - free_tier=True, - etag="etag_value", - database_edition=database.Database.DatabaseEdition.STANDARD, - ) - ) - await client.get_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.GetDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_databases_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_databases), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore_admin.ListDatabasesResponse( - unreachable=["unreachable_value"], - ) - ) - await client.list_databases(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.ListDatabasesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_database_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.update_database), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.update_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.UpdateDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_delete_database_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_database), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.delete_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.DeleteDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_user_creds_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_user_creds), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gfa_user_creds.UserCreds( - name="name_value", - state=gfa_user_creds.UserCreds.State.ENABLED, - secure_password="secure_password_value", - ) - ) - await client.create_user_creds(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.CreateUserCredsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_get_user_creds_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_user_creds), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - user_creds.UserCreds( - name="name_value", - state=user_creds.UserCreds.State.ENABLED, - secure_password="secure_password_value", - ) - ) - await client.get_user_creds(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.GetUserCredsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_user_creds_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_user_creds), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore_admin.ListUserCredsResponse() - ) - await client.list_user_creds(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.ListUserCredsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_enable_user_creds_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.enable_user_creds), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - user_creds.UserCreds( - name="name_value", - state=user_creds.UserCreds.State.ENABLED, - secure_password="secure_password_value", - ) - ) - await client.enable_user_creds(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.EnableUserCredsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_disable_user_creds_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.disable_user_creds), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - user_creds.UserCreds( - name="name_value", - state=user_creds.UserCreds.State.ENABLED, - secure_password="secure_password_value", - ) - ) - await client.disable_user_creds(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.DisableUserCredsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_reset_user_password_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.reset_user_password), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - user_creds.UserCreds( - name="name_value", - state=user_creds.UserCreds.State.ENABLED, - secure_password="secure_password_value", - ) - ) - await client.reset_user_password(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.ResetUserPasswordRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_user_creds_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_user_creds), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_user_creds(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.DeleteUserCredsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_get_backup_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backup.Backup( - name="name_value", - database="database_value", - database_uid="database_uid_value", - state=backup.Backup.State.CREATING, - ) - ) - await client.get_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.GetBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_backups_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_backups), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore_admin.ListBackupsResponse( - unreachable=["unreachable_value"], - ) - ) - await client.list_backups(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.ListBackupsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_delete_backup_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.DeleteBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_restore_database_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.restore_database), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.restore_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.RestoreDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_backup_schedule_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.create_backup_schedule), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - schedule.BackupSchedule( - name="name_value", - ) - ) - await client.create_backup_schedule(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.CreateBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_backup_schedule_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_schedule), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - schedule.BackupSchedule( - name="name_value", - ) - ) - await client.get_backup_schedule(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.GetBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_backup_schedules_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_backup_schedules), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore_admin.ListBackupSchedulesResponse() - ) - await client.list_backup_schedules(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.ListBackupSchedulesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_backup_schedule_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_schedule), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - schedule.BackupSchedule( - name="name_value", - ) - ) - await client.update_backup_schedule(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.UpdateBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_backup_schedule_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_backup_schedule), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_backup_schedule(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.DeleteBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_clone_database_empty_call_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.clone_database), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.clone_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.CloneDatabaseRequest() - - assert args[0] == request_msg - - -@pytest.mark.asyncio -async def test_clone_database_routing_parameters_request_1_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.clone_database), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.clone_database( - request={"pitr_snapshot": {"database": "projects/sample1/sample2"}} - ) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, kw = call.mock_calls[0] - request_msg = firestore_admin.CloneDatabaseRequest( - **{"pitr_snapshot": {"database": "projects/sample1/sample2"}} - ) - - assert args[0] == request_msg - - expected_headers = {"project_id": "sample1"} - assert ( - gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] - ) - - -@pytest.mark.asyncio -async def test_clone_database_routing_parameters_request_2_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.clone_database), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.clone_database( - request={ - "pitr_snapshot": { - "database": "projects/sample1/databases/sample2/sample3" - } - } - ) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, kw = call.mock_calls[0] - request_msg = firestore_admin.CloneDatabaseRequest( - **{ - "pitr_snapshot": { - "database": "projects/sample1/databases/sample2/sample3" - } - } - ) - - assert args[0] == request_msg - - expected_headers = {"project_id": "sample1", "database_id": "sample2"} - assert ( - gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] - ) - - -def test_transport_kind_rest(): - transport = FirestoreAdminClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_create_index_rest_bad_request(request_type=firestore_admin.CreateIndexRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/databases/sample2/collectionGroups/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_index(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.CreateIndexRequest, - dict, - ], -) -def test_create_index_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/databases/sample2/collectionGroups/sample3" - } - request_init["index"] = { - "name": "name_value", - "query_scope": 1, - "api_scope": 1, - "fields": [ - { - "field_path": "field_path_value", - "order": 1, - "array_config": 1, - "vector_config": {"dimension": 966, "flat": {}}, - } - ], - "state": 1, - "density": 1, - "multikey": True, - "shard_count": 1178, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = firestore_admin.CreateIndexRequest.meta.fields["index"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["index"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["index"][field])): - del request_init["index"][field][i][subfield] - else: - del 
request_init["index"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_index(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_index_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_create_index" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_create_index_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_create_index" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore_admin.CreateIndexRequest.pb( - firestore_admin.CreateIndexRequest() - ) - 
transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = firestore_admin.CreateIndexRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.create_index( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_indexes_rest_bad_request(request_type=firestore_admin.ListIndexesRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/databases/sample2/collectionGroups/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_indexes(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.ListIndexesRequest, - dict, - ], -) -def test_list_indexes_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/databases/sample2/collectionGroups/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = firestore_admin.ListIndexesResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore_admin.ListIndexesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_indexes(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListIndexesPager) - assert response.next_page_token == "next_page_token_value" - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_indexes_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_list_indexes" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_list_indexes_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_list_indexes" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore_admin.ListIndexesRequest.pb( - firestore_admin.ListIndexesRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = firestore_admin.ListIndexesResponse.to_json( - firestore_admin.ListIndexesResponse() - ) - req.return_value.content = return_value - - request = firestore_admin.ListIndexesRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore_admin.ListIndexesResponse() - post_with_metadata.return_value = ( - firestore_admin.ListIndexesResponse(), - metadata, - ) - - client.list_indexes( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - 
pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_index_rest_bad_request(request_type=firestore_admin.GetIndexRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/databases/sample2/collectionGroups/sample3/indexes/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_index(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.GetIndexRequest, - dict, - ], -) -def test_get_index_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/databases/sample2/collectionGroups/sample3/indexes/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = index.Index( - name="name_value", - query_scope=index.Index.QueryScope.COLLECTION, - api_scope=index.Index.ApiScope.DATASTORE_MODE_API, - state=index.Index.State.CREATING, - density=index.Index.Density.SPARSE_ALL, - multikey=True, - shard_count=1178, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = index.Index.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_index(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, index.Index) - assert response.name == "name_value" - assert response.query_scope == index.Index.QueryScope.COLLECTION - assert response.api_scope == index.Index.ApiScope.DATASTORE_MODE_API - assert response.state == index.Index.State.CREATING - assert response.density == index.Index.Density.SPARSE_ALL - assert response.multikey is True - assert response.shard_count == 1178 - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_index_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_get_index" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_get_index_with_metadata" - ) as post_with_metadata, mock.patch.object( - 
transports.FirestoreAdminRestInterceptor, "pre_get_index" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore_admin.GetIndexRequest.pb( - firestore_admin.GetIndexRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = index.Index.to_json(index.Index()) - req.return_value.content = return_value - - request = firestore_admin.GetIndexRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = index.Index() - post_with_metadata.return_value = index.Index(), metadata - - client.get_index( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_index_rest_bad_request(request_type=firestore_admin.DeleteIndexRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/databases/sample2/collectionGroups/sample3/indexes/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_index(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.DeleteIndexRequest, - dict, - ], -) -def test_delete_index_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/databases/sample2/collectionGroups/sample3/indexes/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = "" - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_index(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_index_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_delete_index" - ) as pre: - pre.assert_not_called() - pb_message = firestore_admin.DeleteIndexRequest.pb( - firestore_admin.DeleteIndexRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = firestore_admin.DeleteIndexRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_index( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_get_field_rest_bad_request(request_type=firestore_admin.GetFieldRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/databases/sample2/collectionGroups/sample3/fields/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_field(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.GetFieldRequest, - dict, - ], -) -def test_get_field_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/databases/sample2/collectionGroups/sample3/fields/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = field.Field( - name="name_value", - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = field.Field.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_field(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, field.Field) - assert response.name == "name_value" - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_field_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_get_field" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_get_field_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_get_field" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore_admin.GetFieldRequest.pb( - firestore_admin.GetFieldRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = field.Field.to_json(field.Field()) - req.return_value.content = return_value - - request = firestore_admin.GetFieldRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = field.Field() - post_with_metadata.return_value = field.Field(), metadata - - client.get_field( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def 
test_update_field_rest_bad_request(request_type=firestore_admin.UpdateFieldRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = { - "field": { - "name": "projects/sample1/databases/sample2/collectionGroups/sample3/fields/sample4" - } - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_field(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.UpdateFieldRequest, - dict, - ], -) -def test_update_field_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = { - "field": { - "name": "projects/sample1/databases/sample2/collectionGroups/sample3/fields/sample4" - } - } - request_init["field"] = { - "name": "projects/sample1/databases/sample2/collectionGroups/sample3/fields/sample4", - "index_config": { - "indexes": [ - { - "name": "name_value", - "query_scope": 1, - "api_scope": 1, - "fields": [ - { - "field_path": "field_path_value", - "order": 1, - "array_config": 1, - "vector_config": {"dimension": 966, "flat": {}}, - } - ], - "state": 1, - "density": 1, - "multikey": True, - "shard_count": 1178, - } - ], - "uses_ancestor_config": True, - "ancestor_field": "ancestor_field_value", - "reverting": True, - }, - "ttl_config": {"state": 1}, - } - # 
The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = firestore_admin.UpdateFieldRequest.meta.fields["field"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["field"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # 
Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["field"][field])): - del request_init["field"][field][i][subfield] - else: - del request_init["field"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_field(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_field_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_update_field" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_update_field_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_update_field" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore_admin.UpdateFieldRequest.pb( - firestore_admin.UpdateFieldRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = firestore_admin.UpdateFieldRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.update_field( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - 
post_with_metadata.assert_called_once() - - -def test_list_fields_rest_bad_request(request_type=firestore_admin.ListFieldsRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/databases/sample2/collectionGroups/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_fields(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.ListFieldsRequest, - dict, - ], -) -def test_list_fields_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/databases/sample2/collectionGroups/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = firestore_admin.ListFieldsResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore_admin.ListFieldsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_fields(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListFieldsPager) - assert response.next_page_token == "next_page_token_value" - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_fields_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_list_fields" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_list_fields_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_list_fields" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore_admin.ListFieldsRequest.pb( - firestore_admin.ListFieldsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - 
req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = firestore_admin.ListFieldsResponse.to_json( - firestore_admin.ListFieldsResponse() - ) - req.return_value.content = return_value - - request = firestore_admin.ListFieldsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore_admin.ListFieldsResponse() - post_with_metadata.return_value = firestore_admin.ListFieldsResponse(), metadata - - client.list_fields( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_export_documents_rest_bad_request( - request_type=firestore_admin.ExportDocumentsRequest, -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.export_documents(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.ExportDocumentsRequest, - dict, - ], -) -def test_export_documents_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.export_documents(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_export_documents_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_export_documents" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_export_documents_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_export_documents" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore_admin.ExportDocumentsRequest.pb( - firestore_admin.ExportDocumentsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = firestore_admin.ExportDocumentsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.export_documents( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() 
- post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_import_documents_rest_bad_request( - request_type=firestore_admin.ImportDocumentsRequest, -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.import_documents(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.ImportDocumentsRequest, - dict, - ], -) -def test_import_documents_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.import_documents(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_import_documents_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_import_documents" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_import_documents_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_import_documents" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore_admin.ImportDocumentsRequest.pb( - firestore_admin.ImportDocumentsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - 
return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = firestore_admin.ImportDocumentsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.import_documents( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_bulk_delete_documents_rest_bad_request( - request_type=firestore_admin.BulkDeleteDocumentsRequest, -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.bulk_delete_documents(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.BulkDeleteDocumentsRequest, - dict, - ], -) -def test_bulk_delete_documents_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.bulk_delete_documents(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_bulk_delete_documents_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_bulk_delete_documents" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, - "post_bulk_delete_documents_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_bulk_delete_documents" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore_admin.BulkDeleteDocumentsRequest.pb( - firestore_admin.BulkDeleteDocumentsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = firestore_admin.BulkDeleteDocumentsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.bulk_delete_documents( - request, - metadata=[ - ("key", "val"), - ("cephalopod", 
"squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_database_rest_bad_request( - request_type=firestore_admin.CreateDatabaseRequest, -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_database(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.CreateDatabaseRequest, - dict, - ], -) -def test_create_database_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1"} - request_init["database"] = { - "name": "name_value", - "uid": "uid_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "delete_time": {}, - "location_id": "location_id_value", - "type_": 1, - "concurrency_mode": 1, - "version_retention_period": {"seconds": 751, "nanos": 543}, - "earliest_version_time": {}, - "point_in_time_recovery_enablement": 1, - "app_engine_integration_mode": 1, - "key_prefix": "key_prefix_value", - "delete_protection_state": 1, - "cmek_config": { - "kms_key_name": "kms_key_name_value", - "active_key_version": [ - "active_key_version_value1", 
- "active_key_version_value2", - ], - }, - "previous_id": "previous_id_value", - "source_info": { - "backup": {"backup": "backup_value"}, - "operation": "operation_value", - }, - "tags": {}, - "free_tier": True, - "etag": "etag_value", - "database_edition": 1, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = firestore_admin.CreateDatabaseRequest.meta.fields["database"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["database"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, 
dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["database"][field])): - del request_init["database"][field][i][subfield] - else: - del request_init["database"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_database(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_database_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_create_database" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_create_database_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_create_database" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore_admin.CreateDatabaseRequest.pb( - firestore_admin.CreateDatabaseRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = firestore_admin.CreateDatabaseRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.create_database( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - 
post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_database_rest_bad_request(request_type=firestore_admin.GetDatabaseRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_database(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.GetDatabaseRequest, - dict, - ], -) -def test_get_database_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = database.Database( - name="name_value", - uid="uid_value", - location_id="location_id_value", - type_=database.Database.DatabaseType.FIRESTORE_NATIVE, - concurrency_mode=database.Database.ConcurrencyMode.OPTIMISTIC, - point_in_time_recovery_enablement=database.Database.PointInTimeRecoveryEnablement.POINT_IN_TIME_RECOVERY_ENABLED, - app_engine_integration_mode=database.Database.AppEngineIntegrationMode.ENABLED, - key_prefix="key_prefix_value", - delete_protection_state=database.Database.DeleteProtectionState.DELETE_PROTECTION_DISABLED, - previous_id="previous_id_value", - free_tier=True, - etag="etag_value", - database_edition=database.Database.DatabaseEdition.STANDARD, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = database.Database.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_database(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, database.Database) - assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.location_id == "location_id_value" - assert response.type_ == database.Database.DatabaseType.FIRESTORE_NATIVE - assert response.concurrency_mode == database.Database.ConcurrencyMode.OPTIMISTIC - assert ( - response.point_in_time_recovery_enablement - == database.Database.PointInTimeRecoveryEnablement.POINT_IN_TIME_RECOVERY_ENABLED - ) - assert ( - response.app_engine_integration_mode - == database.Database.AppEngineIntegrationMode.ENABLED - ) - assert response.key_prefix == "key_prefix_value" - assert ( - response.delete_protection_state - == database.Database.DeleteProtectionState.DELETE_PROTECTION_DISABLED - ) - assert response.previous_id == "previous_id_value" - assert response.free_tier is True - assert response.etag == "etag_value" - assert response.database_edition == database.Database.DatabaseEdition.STANDARD - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_database_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_get_database" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_get_database_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_get_database" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore_admin.GetDatabaseRequest.pb( - 
firestore_admin.GetDatabaseRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = database.Database.to_json(database.Database()) - req.return_value.content = return_value - - request = firestore_admin.GetDatabaseRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = database.Database() - post_with_metadata.return_value = database.Database(), metadata - - client.get_database( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_databases_rest_bad_request( - request_type=firestore_admin.ListDatabasesRequest, -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_databases(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.ListDatabasesRequest, - dict, - ], -) -def test_list_databases_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = firestore_admin.ListDatabasesResponse( - unreachable=["unreachable_value"], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore_admin.ListDatabasesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_databases(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, firestore_admin.ListDatabasesResponse) - assert response.unreachable == ["unreachable_value"] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_databases_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_list_databases" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_list_databases_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_list_databases" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore_admin.ListDatabasesRequest.pb( - firestore_admin.ListDatabasesRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = firestore_admin.ListDatabasesResponse.to_json( - firestore_admin.ListDatabasesResponse() - ) - req.return_value.content = return_value - - request = firestore_admin.ListDatabasesRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore_admin.ListDatabasesResponse() - post_with_metadata.return_value = ( - firestore_admin.ListDatabasesResponse(), - metadata, - ) - - client.list_databases( - request, - metadata=[ - ("key", "val"), - ("cephalopod", 
"squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_database_rest_bad_request( - request_type=firestore_admin.UpdateDatabaseRequest, -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"database": {"name": "projects/sample1/databases/sample2"}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_database(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.UpdateDatabaseRequest, - dict, - ], -) -def test_update_database_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"database": {"name": "projects/sample1/databases/sample2"}} - request_init["database"] = { - "name": "projects/sample1/databases/sample2", - "uid": "uid_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "delete_time": {}, - "location_id": "location_id_value", - "type_": 1, - "concurrency_mode": 1, - "version_retention_period": {"seconds": 751, "nanos": 543}, - "earliest_version_time": {}, - "point_in_time_recovery_enablement": 1, - "app_engine_integration_mode": 1, - "key_prefix": "key_prefix_value", - "delete_protection_state": 1, - "cmek_config": { - 
"kms_key_name": "kms_key_name_value", - "active_key_version": [ - "active_key_version_value1", - "active_key_version_value2", - ], - }, - "previous_id": "previous_id_value", - "source_info": { - "backup": {"backup": "backup_value"}, - "operation": "operation_value", - }, - "tags": {}, - "free_tier": True, - "etag": "etag_value", - "database_edition": 1, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = firestore_admin.UpdateDatabaseRequest.meta.fields["database"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["database"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - 
result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["database"][field])): - del request_init["database"][field][i][subfield] - else: - del request_init["database"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_database(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_database_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_update_database" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_update_database_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_update_database" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore_admin.UpdateDatabaseRequest.pb( - firestore_admin.UpdateDatabaseRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = firestore_admin.UpdateDatabaseRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.update_database( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - 
post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_database_rest_bad_request( - request_type=firestore_admin.DeleteDatabaseRequest, -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_database(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.DeleteDatabaseRequest, - dict, - ], -) -def test_delete_database_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_database(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_database_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_delete_database" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_delete_database_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_delete_database" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore_admin.DeleteDatabaseRequest.pb( - firestore_admin.DeleteDatabaseRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - 
return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = firestore_admin.DeleteDatabaseRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.delete_database( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_user_creds_rest_bad_request( - request_type=firestore_admin.CreateUserCredsRequest, -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_user_creds(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.CreateUserCredsRequest, - dict, - ], -) -def test_create_user_creds_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/databases/sample2"} - request_init["user_creds"] = { - "name": "name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "state": 1, - "secure_password": "secure_password_value", - "resource_identity": {"principal": "principal_value"}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = firestore_admin.CreateUserCredsRequest.meta.fields["user_creds"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["user_creds"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["user_creds"][field])): - del request_init["user_creds"][field][i][subfield] - else: - del 
request_init["user_creds"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = gfa_user_creds.UserCreds( - name="name_value", - state=gfa_user_creds.UserCreds.State.ENABLED, - secure_password="secure_password_value", - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gfa_user_creds.UserCreds.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_user_creds(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gfa_user_creds.UserCreds) - assert response.name == "name_value" - assert response.state == gfa_user_creds.UserCreds.State.ENABLED - assert response.secure_password == "secure_password_value" - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_user_creds_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_create_user_creds" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_create_user_creds_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_create_user_creds" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore_admin.CreateUserCredsRequest.pb( - firestore_admin.CreateUserCredsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gfa_user_creds.UserCreds.to_json(gfa_user_creds.UserCreds()) - req.return_value.content = return_value - - request = firestore_admin.CreateUserCredsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = gfa_user_creds.UserCreds() - post_with_metadata.return_value = gfa_user_creds.UserCreds(), metadata - - client.create_user_creds( - request, 
- metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_user_creds_rest_bad_request( - request_type=firestore_admin.GetUserCredsRequest, -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/databases/sample2/userCreds/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_user_creds(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.GetUserCredsRequest, - dict, - ], -) -def test_get_user_creds_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/databases/sample2/userCreds/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = user_creds.UserCreds( - name="name_value", - state=user_creds.UserCreds.State.ENABLED, - secure_password="secure_password_value", - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = user_creds.UserCreds.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_user_creds(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, user_creds.UserCreds) - assert response.name == "name_value" - assert response.state == user_creds.UserCreds.State.ENABLED - assert response.secure_password == "secure_password_value" - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_user_creds_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_get_user_creds" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_get_user_creds_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_get_user_creds" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore_admin.GetUserCredsRequest.pb( - firestore_admin.GetUserCredsRequest() - ) - transcode.return_value = { - 
"method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = user_creds.UserCreds.to_json(user_creds.UserCreds()) - req.return_value.content = return_value - - request = firestore_admin.GetUserCredsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = user_creds.UserCreds() - post_with_metadata.return_value = user_creds.UserCreds(), metadata - - client.get_user_creds( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_user_creds_rest_bad_request( - request_type=firestore_admin.ListUserCredsRequest, -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_user_creds(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.ListUserCredsRequest, - dict, - ], -) -def test_list_user_creds_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = firestore_admin.ListUserCredsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore_admin.ListUserCredsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_user_creds(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, firestore_admin.ListUserCredsResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_user_creds_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_list_user_creds" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_list_user_creds_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_list_user_creds" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore_admin.ListUserCredsRequest.pb( - firestore_admin.ListUserCredsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = firestore_admin.ListUserCredsResponse.to_json( - firestore_admin.ListUserCredsResponse() - ) - req.return_value.content = return_value - - request = firestore_admin.ListUserCredsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore_admin.ListUserCredsResponse() - post_with_metadata.return_value = ( - firestore_admin.ListUserCredsResponse(), - metadata, - ) - - client.list_user_creds( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - 
post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_enable_user_creds_rest_bad_request( - request_type=firestore_admin.EnableUserCredsRequest, -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/databases/sample2/userCreds/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.enable_user_creds(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.EnableUserCredsRequest, - dict, - ], -) -def test_enable_user_creds_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/databases/sample2/userCreds/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = user_creds.UserCreds( - name="name_value", - state=user_creds.UserCreds.State.ENABLED, - secure_password="secure_password_value", - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = user_creds.UserCreds.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.enable_user_creds(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, user_creds.UserCreds) - assert response.name == "name_value" - assert response.state == user_creds.UserCreds.State.ENABLED - assert response.secure_password == "secure_password_value" - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_enable_user_creds_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_enable_user_creds" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_enable_user_creds_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_enable_user_creds" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore_admin.EnableUserCredsRequest.pb( - firestore_admin.EnableUserCredsRequest() - ) - 
transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = user_creds.UserCreds.to_json(user_creds.UserCreds()) - req.return_value.content = return_value - - request = firestore_admin.EnableUserCredsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = user_creds.UserCreds() - post_with_metadata.return_value = user_creds.UserCreds(), metadata - - client.enable_user_creds( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_disable_user_creds_rest_bad_request( - request_type=firestore_admin.DisableUserCredsRequest, -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/databases/sample2/userCreds/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.disable_user_creds(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.DisableUserCredsRequest, - dict, - ], -) -def test_disable_user_creds_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/databases/sample2/userCreds/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = user_creds.UserCreds( - name="name_value", - state=user_creds.UserCreds.State.ENABLED, - secure_password="secure_password_value", - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = user_creds.UserCreds.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.disable_user_creds(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, user_creds.UserCreds) - assert response.name == "name_value" - assert response.state == user_creds.UserCreds.State.ENABLED - assert response.secure_password == "secure_password_value" - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_disable_user_creds_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_disable_user_creds" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, - "post_disable_user_creds_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_disable_user_creds" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore_admin.DisableUserCredsRequest.pb( - firestore_admin.DisableUserCredsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = user_creds.UserCreds.to_json(user_creds.UserCreds()) - req.return_value.content = return_value - - request = firestore_admin.DisableUserCredsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = user_creds.UserCreds() - post_with_metadata.return_value = user_creds.UserCreds(), metadata - - client.disable_user_creds( - request, - metadata=[ 
- ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_reset_user_password_rest_bad_request( - request_type=firestore_admin.ResetUserPasswordRequest, -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/databases/sample2/userCreds/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.reset_user_password(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.ResetUserPasswordRequest, - dict, - ], -) -def test_reset_user_password_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/databases/sample2/userCreds/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = user_creds.UserCreds( - name="name_value", - state=user_creds.UserCreds.State.ENABLED, - secure_password="secure_password_value", - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = user_creds.UserCreds.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.reset_user_password(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, user_creds.UserCreds) - assert response.name == "name_value" - assert response.state == user_creds.UserCreds.State.ENABLED - assert response.secure_password == "secure_password_value" - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_reset_user_password_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_reset_user_password" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, - "post_reset_user_password_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_reset_user_password" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore_admin.ResetUserPasswordRequest.pb( - firestore_admin.ResetUserPasswordRequest() 
- ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = user_creds.UserCreds.to_json(user_creds.UserCreds()) - req.return_value.content = return_value - - request = firestore_admin.ResetUserPasswordRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = user_creds.UserCreds() - post_with_metadata.return_value = user_creds.UserCreds(), metadata - - client.reset_user_password( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_user_creds_rest_bad_request( - request_type=firestore_admin.DeleteUserCredsRequest, -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/databases/sample2/userCreds/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_user_creds(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.DeleteUserCredsRequest, - dict, - ], -) -def test_delete_user_creds_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/databases/sample2/userCreds/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = "" - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_user_creds(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_user_creds_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_delete_user_creds" - ) as pre: - pre.assert_not_called() - pb_message = firestore_admin.DeleteUserCredsRequest.pb( - firestore_admin.DeleteUserCredsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = firestore_admin.DeleteUserCredsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_user_creds( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_get_backup_rest_bad_request(request_type=firestore_admin.GetBackupRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/backups/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_backup(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.GetBackupRequest, - dict, - ], -) -def test_get_backup_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/backups/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = backup.Backup( - name="name_value", - database="database_value", - database_uid="database_uid_value", - state=backup.Backup.State.CREATING, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backup.Backup.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_backup(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, backup.Backup) - assert response.name == "name_value" - assert response.database == "database_value" - assert response.database_uid == "database_uid_value" - assert response.state == backup.Backup.State.CREATING - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_backup_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_get_backup" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_get_backup_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_get_backup" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore_admin.GetBackupRequest.pb( - firestore_admin.GetBackupRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backup.Backup.to_json(backup.Backup()) - req.return_value.content = return_value - - request = firestore_admin.GetBackupRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = backup.Backup() - post_with_metadata.return_value = backup.Backup(), metadata - - client.get_backup( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - 
pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_backups_rest_bad_request(request_type=firestore_admin.ListBackupsRequest): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_backups(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.ListBackupsRequest, - dict, - ], -) -def test_list_backups_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = firestore_admin.ListBackupsResponse( - unreachable=["unreachable_value"], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore_admin.ListBackupsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_backups(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore_admin.ListBackupsResponse) - assert response.unreachable == ["unreachable_value"] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_backups_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_list_backups" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_list_backups_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_list_backups" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore_admin.ListBackupsRequest.pb( - firestore_admin.ListBackupsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - 
req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = firestore_admin.ListBackupsResponse.to_json( - firestore_admin.ListBackupsResponse() - ) - req.return_value.content = return_value - - request = firestore_admin.ListBackupsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore_admin.ListBackupsResponse() - post_with_metadata.return_value = ( - firestore_admin.ListBackupsResponse(), - metadata, - ) - - client.list_backups( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_backup_rest_bad_request( - request_type=firestore_admin.DeleteBackupRequest, -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/backups/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_backup(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.DeleteBackupRequest, - dict, - ], -) -def test_delete_backup_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/backups/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = "" - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_backup(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_backup_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_delete_backup" - ) as pre: - pre.assert_not_called() - pb_message = firestore_admin.DeleteBackupRequest.pb( - firestore_admin.DeleteBackupRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = firestore_admin.DeleteBackupRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_backup( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_restore_database_rest_bad_request( - request_type=firestore_admin.RestoreDatabaseRequest, -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.restore_database(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.RestoreDatabaseRequest, - dict, - ], -) -def test_restore_database_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.restore_database(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_restore_database_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_restore_database" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_restore_database_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_restore_database" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore_admin.RestoreDatabaseRequest.pb( - firestore_admin.RestoreDatabaseRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = firestore_admin.RestoreDatabaseRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.restore_database( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() 
- post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_backup_schedule_rest_bad_request( - request_type=firestore_admin.CreateBackupScheduleRequest, -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_backup_schedule(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.CreateBackupScheduleRequest, - dict, - ], -) -def test_create_backup_schedule_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/databases/sample2"} - request_init["backup_schedule"] = { - "name": "name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "retention": {"seconds": 751, "nanos": 543}, - "daily_recurrence": {}, - "weekly_recurrence": {"day": 1}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = firestore_admin.CreateBackupScheduleRequest.meta.fields[ - "backup_schedule" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["backup_schedule"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime 
version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["backup_schedule"][field])): - del request_init["backup_schedule"][field][i][subfield] - else: - del request_init["backup_schedule"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = schedule.BackupSchedule( - name="name_value", - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_backup_schedule(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, schedule.BackupSchedule) - assert response.name == "name_value" - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_backup_schedule_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_create_backup_schedule" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, - "post_create_backup_schedule_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_create_backup_schedule" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore_admin.CreateBackupScheduleRequest.pb( - firestore_admin.CreateBackupScheduleRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = schedule.BackupSchedule.to_json(schedule.BackupSchedule()) - req.return_value.content = return_value - - request = firestore_admin.CreateBackupScheduleRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = schedule.BackupSchedule() - post_with_metadata.return_value = schedule.BackupSchedule(), metadata - - client.create_backup_schedule( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - 
pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_backup_schedule_rest_bad_request( - request_type=firestore_admin.GetBackupScheduleRequest, -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/databases/sample2/backupSchedules/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_backup_schedule(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.GetBackupScheduleRequest, - dict, - ], -) -def test_get_backup_schedule_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/databases/sample2/backupSchedules/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = schedule.BackupSchedule( - name="name_value", - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_backup_schedule(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, schedule.BackupSchedule) - assert response.name == "name_value" - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_backup_schedule_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_get_backup_schedule" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, - "post_get_backup_schedule_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_get_backup_schedule" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore_admin.GetBackupScheduleRequest.pb( - firestore_admin.GetBackupScheduleRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = schedule.BackupSchedule.to_json(schedule.BackupSchedule()) - req.return_value.content = return_value - - request = firestore_admin.GetBackupScheduleRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = schedule.BackupSchedule() - post_with_metadata.return_value = schedule.BackupSchedule(), metadata - - client.get_backup_schedule( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_backup_schedules_rest_bad_request( - request_type=firestore_admin.ListBackupSchedulesRequest, -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_backup_schedules(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.ListBackupSchedulesRequest, - dict, - ], -) -def test_list_backup_schedules_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = firestore_admin.ListBackupSchedulesResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore_admin.ListBackupSchedulesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_backup_schedules(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, firestore_admin.ListBackupSchedulesResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_backup_schedules_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_list_backup_schedules" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, - "post_list_backup_schedules_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_list_backup_schedules" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore_admin.ListBackupSchedulesRequest.pb( - firestore_admin.ListBackupSchedulesRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = firestore_admin.ListBackupSchedulesResponse.to_json( - firestore_admin.ListBackupSchedulesResponse() - ) - req.return_value.content = return_value - - request = firestore_admin.ListBackupSchedulesRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore_admin.ListBackupSchedulesResponse() - post_with_metadata.return_value = ( - firestore_admin.ListBackupSchedulesResponse(), - metadata, - ) - - client.list_backup_schedules( - request, - metadata=[ - 
("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_backup_schedule_rest_bad_request( - request_type=firestore_admin.UpdateBackupScheduleRequest, -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = { - "backup_schedule": { - "name": "projects/sample1/databases/sample2/backupSchedules/sample3" - } - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_backup_schedule(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.UpdateBackupScheduleRequest, - dict, - ], -) -def test_update_backup_schedule_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = { - "backup_schedule": { - "name": "projects/sample1/databases/sample2/backupSchedules/sample3" - } - } - request_init["backup_schedule"] = { - "name": "projects/sample1/databases/sample2/backupSchedules/sample3", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "retention": {"seconds": 751, "nanos": 543}, - "daily_recurrence": {}, - "weekly_recurrence": {"day": 1}, - } - # The version of a generated dependency at test runtime may differ from the version used 
during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = firestore_admin.UpdateBackupScheduleRequest.meta.fields[ - "backup_schedule" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["backup_schedule"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not 
present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["backup_schedule"][field])): - del request_init["backup_schedule"][field][i][subfield] - else: - del request_init["backup_schedule"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = schedule.BackupSchedule( - name="name_value", - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_backup_schedule(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, schedule.BackupSchedule) - assert response.name == "name_value" - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_backup_schedule_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_update_backup_schedule" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, - "post_update_backup_schedule_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_update_backup_schedule" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore_admin.UpdateBackupScheduleRequest.pb( - firestore_admin.UpdateBackupScheduleRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = schedule.BackupSchedule.to_json(schedule.BackupSchedule()) - req.return_value.content = return_value - - request = firestore_admin.UpdateBackupScheduleRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = schedule.BackupSchedule() - post_with_metadata.return_value = schedule.BackupSchedule(), metadata - - client.update_backup_schedule( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - 
pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_backup_schedule_rest_bad_request( - request_type=firestore_admin.DeleteBackupScheduleRequest, -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/databases/sample2/backupSchedules/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_backup_schedule(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.DeleteBackupScheduleRequest, - dict, - ], -) -def test_delete_backup_schedule_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/databases/sample2/backupSchedules/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = "" - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_backup_schedule(request) - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_backup_schedule_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_delete_backup_schedule" - ) as pre: - pre.assert_not_called() - pb_message = firestore_admin.DeleteBackupScheduleRequest.pb( - firestore_admin.DeleteBackupScheduleRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = firestore_admin.DeleteBackupScheduleRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_backup_schedule( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_clone_database_rest_bad_request( - request_type=firestore_admin.CloneDatabaseRequest, -): - client = FirestoreAdminClient( - 
credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.clone_database(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore_admin.CloneDatabaseRequest, - dict, - ], -) -def test_clone_database_rest_call_success(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.clone_database(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_clone_database_rest_interceptors(null_interceptor): - transport = transports.FirestoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FirestoreAdminRestInterceptor(), - ) - client = FirestoreAdminClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_clone_database" - ) as post, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "post_clone_database_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreAdminRestInterceptor, "pre_clone_database" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore_admin.CloneDatabaseRequest.pb( - firestore_admin.CloneDatabaseRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = firestore_admin.CloneDatabaseRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.clone_database( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - 
post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_cancel_operation_rest_bad_request( - request_type=operations_pb2.CancelOperationRequest, -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/databases/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.CancelOperationRequest, - dict, - ], -) -def test_cancel_operation_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {"name": "projects/sample1/databases/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = "{}" - response_value.content = json_return_value.encode("UTF-8") - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request( - request_type=operations_pb2.DeleteOperationRequest, -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/databases/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.DeleteOperationRequest, - dict, - ], -) -def test_delete_operation_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {"name": "projects/sample1/databases/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = "{}" - response_value.content = json_return_value.encode("UTF-8") - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request( - request_type=operations_pb2.GetOperationRequest, -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/databases/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {"name": "projects/sample1/databases/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request( - request_type=operations_pb2.ListOperationsRequest, -): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/databases/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_operations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.ListOperationsRequest, - dict, - ], -) -def test_list_operations_rest(request_type): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {"name": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -def test_initialize_client_w_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_index_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.create_index), "__call__") as call: - client.create_index(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.CreateIndexRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_indexes_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: - client.list_indexes(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.ListIndexesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_index_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_index), "__call__") as call: - client.get_index(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.GetIndexRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_index_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_index), "__call__") as call: - client.delete_index(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.DeleteIndexRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_field_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.get_field), "__call__") as call: - client.get_field(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.GetFieldRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_field_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.update_field), "__call__") as call: - client.update_field(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.UpdateFieldRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_fields_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_fields), "__call__") as call: - client.list_fields(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.ListFieldsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_export_documents_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.export_documents), "__call__") as call: - client.export_documents(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.ExportDocumentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_import_documents_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.import_documents), "__call__") as call: - client.import_documents(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.ImportDocumentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_bulk_delete_documents_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.bulk_delete_documents), "__call__" - ) as call: - client.bulk_delete_documents(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.BulkDeleteDocumentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_create_database_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.create_database), "__call__") as call: - client.create_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.CreateDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_database_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_database), "__call__") as call: - client.get_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.GetDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_databases_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_databases), "__call__") as call: - client.list_databases(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.ListDatabasesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -def test_update_database_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.update_database), "__call__") as call: - client.update_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.UpdateDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_database_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_database), "__call__") as call: - client.delete_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.DeleteDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_user_creds_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_user_creds), "__call__" - ) as call: - client.create_user_creds(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.CreateUserCredsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_user_creds_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_user_creds), "__call__") as call: - client.get_user_creds(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.GetUserCredsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_user_creds_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_user_creds), "__call__") as call: - client.list_user_creds(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.ListUserCredsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_enable_user_creds_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.enable_user_creds), "__call__" - ) as call: - client.enable_user_creds(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.EnableUserCredsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_disable_user_creds_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.disable_user_creds), "__call__" - ) as call: - client.disable_user_creds(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.DisableUserCredsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_reset_user_password_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.reset_user_password), "__call__" - ) as call: - client.reset_user_password(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.ResetUserPasswordRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_delete_user_creds_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_user_creds), "__call__" - ) as call: - client.delete_user_creds(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.DeleteUserCredsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_backup_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: - client.get_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.GetBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_backups_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_backups), "__call__") as call: - client.list_backups(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.ListBackupsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -def test_delete_backup_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: - client.delete_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.DeleteBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_restore_database_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.restore_database), "__call__") as call: - client.restore_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.RestoreDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_backup_schedule_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), "__call__" - ) as call: - client.create_backup_schedule(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.CreateBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_backup_schedule_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_schedule), "__call__" - ) as call: - client.get_backup_schedule(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.GetBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_backup_schedules_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), "__call__" - ) as call: - client.list_backup_schedules(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.ListBackupSchedulesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_backup_schedule_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.update_backup_schedule), "__call__" - ) as call: - client.update_backup_schedule(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.UpdateBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_backup_schedule_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_schedule), "__call__" - ) as call: - client.delete_backup_schedule(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.DeleteBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_clone_database_empty_call_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.clone_database), "__call__") as call: - client.clone_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore_admin.CloneDatabaseRequest() - - assert args[0] == request_msg - - -def test_clone_database_routing_parameters_request_1_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.clone_database), "__call__") as call: - client.clone_database( - request={"pitr_snapshot": {"database": "projects/sample1/sample2"}} - ) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, kw = call.mock_calls[0] - request_msg = firestore_admin.CloneDatabaseRequest( - **{"pitr_snapshot": {"database": "projects/sample1/sample2"}} - ) - - assert args[0] == request_msg - - expected_headers = {"project_id": "sample1"} - assert ( - gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] - ) - - -def test_clone_database_routing_parameters_request_2_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.clone_database), "__call__") as call: - client.clone_database( - request={ - "pitr_snapshot": { - "database": "projects/sample1/databases/sample2/sample3" - } - } - ) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, kw = call.mock_calls[0] - request_msg = firestore_admin.CloneDatabaseRequest( - **{ - "pitr_snapshot": { - "database": "projects/sample1/databases/sample2/sample3" - } - } - ) - - assert args[0] == request_msg - - expected_headers = {"project_id": "sample1", "database_id": "sample2"} - assert ( - gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] - ) - - -def test_firestore_admin_rest_lro_client(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - transport = client.transport - - # Ensure that we have an api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. 
- assert transport.operations_client is transport.operations_client - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.FirestoreAdminGrpcTransport, - ) - - -def test_firestore_admin_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.FirestoreAdminTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) - - -def test_firestore_admin_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.firestore_admin_v1.services.firestore_admin.transports.FirestoreAdminTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.FirestoreAdminTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - "create_index", - "list_indexes", - "get_index", - "delete_index", - "get_field", - "update_field", - "list_fields", - "export_documents", - "import_documents", - "bulk_delete_documents", - "create_database", - "get_database", - "list_databases", - "update_database", - "delete_database", - "create_user_creds", - "get_user_creds", - "list_user_creds", - "enable_user_creds", - "disable_user_creds", - "reset_user_password", - "delete_user_creds", - "get_backup", - "list_backups", - "delete_backup", - "restore_database", - "create_backup_schedule", - "get_backup_schedule", - "list_backup_schedules", - "update_backup_schedule", - "delete_backup_schedule", - "clone_database", - "get_operation", - "cancel_operation", - "delete_operation", - "list_operations", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_firestore_admin_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.firestore_admin_v1.services.firestore_admin.transports.FirestoreAdminTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.FirestoreAdminTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - 
default_scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", - ), - quota_project_id="octopus", - ) - - -def test_firestore_admin_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.firestore_admin_v1.services.firestore_admin.transports.FirestoreAdminTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.FirestoreAdminTransport() - adc.assert_called_once() - - -def test_firestore_admin_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - FirestoreAdminClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", - ), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.FirestoreAdminGrpcTransport, - transports.FirestoreAdminGrpcAsyncIOTransport, - ], -) -def test_firestore_admin_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", - ), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.FirestoreAdminGrpcTransport, - transports.FirestoreAdminGrpcAsyncIOTransport, - transports.FirestoreAdminRestTransport, - ], -) -def test_firestore_admin_transport_auth_gdch_credentials(transport_class): - host = "https://language.com" - api_audience_tests = [None, "https://language2.com"] - api_audience_expect = [host, "https://language2.com"] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, "default", autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock( - return_value=gdch_mock - ) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with(e) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.FirestoreAdminGrpcTransport, grpc_helpers), - (transports.FirestoreAdminGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -def test_firestore_admin_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "firestore.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", - ), - scopes=["1", "2"], - default_host="firestore.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.FirestoreAdminGrpcTransport, - transports.FirestoreAdminGrpcAsyncIOTransport, - ], -) -def test_firestore_admin_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback, - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, private_key=expected_key - ) - - -def test_firestore_admin_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ) as mock_configure_mtls_channel: - transports.FirestoreAdminRestTransport( - credentials=cred, client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_firestore_admin_host_no_port(transport_name): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="firestore.googleapis.com" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "firestore.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://firestore.googleapis.com" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_firestore_admin_host_with_port(transport_name): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="firestore.googleapis.com:8000" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "firestore.googleapis.com:8000" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://firestore.googleapis.com:8000" - ) - - -@pytest.mark.parametrize( - 
"transport_name", - [ - "rest", - ], -) -def test_firestore_admin_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = FirestoreAdminClient( - credentials=creds1, - transport=transport_name, - ) - client2 = FirestoreAdminClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_index._session - session2 = client2.transport.create_index._session - assert session1 != session2 - session1 = client1.transport.list_indexes._session - session2 = client2.transport.list_indexes._session - assert session1 != session2 - session1 = client1.transport.get_index._session - session2 = client2.transport.get_index._session - assert session1 != session2 - session1 = client1.transport.delete_index._session - session2 = client2.transport.delete_index._session - assert session1 != session2 - session1 = client1.transport.get_field._session - session2 = client2.transport.get_field._session - assert session1 != session2 - session1 = client1.transport.update_field._session - session2 = client2.transport.update_field._session - assert session1 != session2 - session1 = client1.transport.list_fields._session - session2 = client2.transport.list_fields._session - assert session1 != session2 - session1 = client1.transport.export_documents._session - session2 = client2.transport.export_documents._session - assert session1 != session2 - session1 = client1.transport.import_documents._session - session2 = client2.transport.import_documents._session - assert session1 != session2 - session1 = client1.transport.bulk_delete_documents._session - session2 = client2.transport.bulk_delete_documents._session - assert session1 != session2 - session1 = client1.transport.create_database._session - session2 = client2.transport.create_database._session - assert session1 != session2 - session1 = client1.transport.get_database._session - session2 = 
client2.transport.get_database._session - assert session1 != session2 - session1 = client1.transport.list_databases._session - session2 = client2.transport.list_databases._session - assert session1 != session2 - session1 = client1.transport.update_database._session - session2 = client2.transport.update_database._session - assert session1 != session2 - session1 = client1.transport.delete_database._session - session2 = client2.transport.delete_database._session - assert session1 != session2 - session1 = client1.transport.create_user_creds._session - session2 = client2.transport.create_user_creds._session - assert session1 != session2 - session1 = client1.transport.get_user_creds._session - session2 = client2.transport.get_user_creds._session - assert session1 != session2 - session1 = client1.transport.list_user_creds._session - session2 = client2.transport.list_user_creds._session - assert session1 != session2 - session1 = client1.transport.enable_user_creds._session - session2 = client2.transport.enable_user_creds._session - assert session1 != session2 - session1 = client1.transport.disable_user_creds._session - session2 = client2.transport.disable_user_creds._session - assert session1 != session2 - session1 = client1.transport.reset_user_password._session - session2 = client2.transport.reset_user_password._session - assert session1 != session2 - session1 = client1.transport.delete_user_creds._session - session2 = client2.transport.delete_user_creds._session - assert session1 != session2 - session1 = client1.transport.get_backup._session - session2 = client2.transport.get_backup._session - assert session1 != session2 - session1 = client1.transport.list_backups._session - session2 = client2.transport.list_backups._session - assert session1 != session2 - session1 = client1.transport.delete_backup._session - session2 = client2.transport.delete_backup._session - assert session1 != session2 - session1 = client1.transport.restore_database._session - session2 = 
client2.transport.restore_database._session - assert session1 != session2 - session1 = client1.transport.create_backup_schedule._session - session2 = client2.transport.create_backup_schedule._session - assert session1 != session2 - session1 = client1.transport.get_backup_schedule._session - session2 = client2.transport.get_backup_schedule._session - assert session1 != session2 - session1 = client1.transport.list_backup_schedules._session - session2 = client2.transport.list_backup_schedules._session - assert session1 != session2 - session1 = client1.transport.update_backup_schedule._session - session2 = client2.transport.update_backup_schedule._session - assert session1 != session2 - session1 = client1.transport.delete_backup_schedule._session - session2 = client2.transport.delete_backup_schedule._session - assert session1 != session2 - session1 = client1.transport.clone_database._session - session2 = client2.transport.clone_database._session - assert session1 != session2 - - -def test_firestore_admin_grpc_transport_channel(): - channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.FirestoreAdminGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_firestore_admin_grpc_asyncio_transport_channel(): - channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.FirestoreAdminGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize( - "transport_class", - [ - transports.FirestoreAdminGrpcTransport, - transports.FirestoreAdminGrpcAsyncIOTransport, - ], -) -def test_firestore_admin_transport_channel_mtls_with_client_cert_source( - transport_class, -): - with mock.patch( - "grpc.ssl_channel_credentials", autospec=True - ) as grpc_ssl_channel_cred: - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, 
client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize( - "transport_class", - [ - transports.FirestoreAdminGrpcTransport, - transports.FirestoreAdminGrpcAsyncIOTransport, - ], -) -def test_firestore_admin_transport_channel_mtls_with_adc(transport_class): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_firestore_admin_grpc_lro_client(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. 
- assert transport.operations_client is transport.operations_client - - -def test_firestore_admin_grpc_lro_async_client(): - client = FirestoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_backup_path(): - project = "squid" - location = "clam" - backup = "whelk" - expected = "projects/{project}/locations/{location}/backups/{backup}".format( - project=project, - location=location, - backup=backup, - ) - actual = FirestoreAdminClient.backup_path(project, location, backup) - assert expected == actual - - -def test_parse_backup_path(): - expected = { - "project": "octopus", - "location": "oyster", - "backup": "nudibranch", - } - path = FirestoreAdminClient.backup_path(**expected) - - # Check that the path construction is reversible. - actual = FirestoreAdminClient.parse_backup_path(path) - assert expected == actual - - -def test_backup_schedule_path(): - project = "cuttlefish" - database = "mussel" - backup_schedule = "winkle" - expected = "projects/{project}/databases/{database}/backupSchedules/{backup_schedule}".format( - project=project, - database=database, - backup_schedule=backup_schedule, - ) - actual = FirestoreAdminClient.backup_schedule_path( - project, database, backup_schedule - ) - assert expected == actual - - -def test_parse_backup_schedule_path(): - expected = { - "project": "nautilus", - "database": "scallop", - "backup_schedule": "abalone", - } - path = FirestoreAdminClient.backup_schedule_path(**expected) - - # Check that the path construction is reversible. 
- actual = FirestoreAdminClient.parse_backup_schedule_path(path) - assert expected == actual - - -def test_collection_group_path(): - project = "squid" - database = "clam" - collection = "whelk" - expected = ( - "projects/{project}/databases/{database}/collectionGroups/{collection}".format( - project=project, - database=database, - collection=collection, - ) - ) - actual = FirestoreAdminClient.collection_group_path(project, database, collection) - assert expected == actual - - -def test_parse_collection_group_path(): - expected = { - "project": "octopus", - "database": "oyster", - "collection": "nudibranch", - } - path = FirestoreAdminClient.collection_group_path(**expected) - - # Check that the path construction is reversible. - actual = FirestoreAdminClient.parse_collection_group_path(path) - assert expected == actual - - -def test_database_path(): - project = "cuttlefish" - database = "mussel" - expected = "projects/{project}/databases/{database}".format( - project=project, - database=database, - ) - actual = FirestoreAdminClient.database_path(project, database) - assert expected == actual - - -def test_parse_database_path(): - expected = { - "project": "winkle", - "database": "nautilus", - } - path = FirestoreAdminClient.database_path(**expected) - - # Check that the path construction is reversible. 
- actual = FirestoreAdminClient.parse_database_path(path) - assert expected == actual - - -def test_field_path(): - project = "scallop" - database = "abalone" - collection = "squid" - field = "clam" - expected = "projects/{project}/databases/{database}/collectionGroups/{collection}/fields/{field}".format( - project=project, - database=database, - collection=collection, - field=field, - ) - actual = FirestoreAdminClient.field_path(project, database, collection, field) - assert expected == actual - - -def test_parse_field_path(): - expected = { - "project": "whelk", - "database": "octopus", - "collection": "oyster", - "field": "nudibranch", - } - path = FirestoreAdminClient.field_path(**expected) - - # Check that the path construction is reversible. - actual = FirestoreAdminClient.parse_field_path(path) - assert expected == actual - - -def test_index_path(): - project = "cuttlefish" - database = "mussel" - collection = "winkle" - index = "nautilus" - expected = "projects/{project}/databases/{database}/collectionGroups/{collection}/indexes/{index}".format( - project=project, - database=database, - collection=collection, - index=index, - ) - actual = FirestoreAdminClient.index_path(project, database, collection, index) - assert expected == actual - - -def test_parse_index_path(): - expected = { - "project": "scallop", - "database": "abalone", - "collection": "squid", - "index": "clam", - } - path = FirestoreAdminClient.index_path(**expected) - - # Check that the path construction is reversible. 
- actual = FirestoreAdminClient.parse_index_path(path) - assert expected == actual - - -def test_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = FirestoreAdminClient.location_path(project, location) - assert expected == actual - - -def test_parse_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = FirestoreAdminClient.location_path(**expected) - - # Check that the path construction is reversible. - actual = FirestoreAdminClient.parse_location_path(path) - assert expected == actual - - -def test_operation_path(): - project = "cuttlefish" - database = "mussel" - operation = "winkle" - expected = "projects/{project}/databases/{database}/operations/{operation}".format( - project=project, - database=database, - operation=operation, - ) - actual = FirestoreAdminClient.operation_path(project, database, operation) - assert expected == actual - - -def test_parse_operation_path(): - expected = { - "project": "nautilus", - "database": "scallop", - "operation": "abalone", - } - path = FirestoreAdminClient.operation_path(**expected) - - # Check that the path construction is reversible. - actual = FirestoreAdminClient.parse_operation_path(path) - assert expected == actual - - -def test_user_creds_path(): - project = "squid" - database = "clam" - user_creds = "whelk" - expected = "projects/{project}/databases/{database}/userCreds/{user_creds}".format( - project=project, - database=database, - user_creds=user_creds, - ) - actual = FirestoreAdminClient.user_creds_path(project, database, user_creds) - assert expected == actual - - -def test_parse_user_creds_path(): - expected = { - "project": "octopus", - "database": "oyster", - "user_creds": "nudibranch", - } - path = FirestoreAdminClient.user_creds_path(**expected) - - # Check that the path construction is reversible. 
- actual = FirestoreAdminClient.parse_user_creds_path(path) - assert expected == actual - - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = FirestoreAdminClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = FirestoreAdminClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = FirestoreAdminClient.parse_common_billing_account_path(path) - assert expected == actual - - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = FirestoreAdminClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = FirestoreAdminClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = FirestoreAdminClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = FirestoreAdminClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = FirestoreAdminClient.common_organization_path(**expected) - - # Check that the path construction is reversible. 
- actual = FirestoreAdminClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format( - project=project, - ) - actual = FirestoreAdminClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = FirestoreAdminClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = FirestoreAdminClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = FirestoreAdminClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = FirestoreAdminClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = FirestoreAdminClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.FirestoreAdminTransport, "_prep_wrapped_messages" - ) as prep: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.FirestoreAdminTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = FirestoreAdminClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_delete_operation(transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -def test_delete_operation_from_dict(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_cancel_operation_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -def test_cancel_operation_from_dict(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_get_operation_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -def test_get_operation_from_dict(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -def test_list_operations_field_headers(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -def test_list_operations_from_dict(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_transport_close_grpc(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc" - ) - with mock.patch.object( - type(getattr(client.transport, "_grpc_channel")), "close" - ) as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = FirestoreAdminAsyncClient( - credentials=async_anonymous_credentials(), transport="grpc_asyncio" - ) - with mock.patch.object( - type(getattr(client.transport, "_grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - with mock.patch.object( - type(getattr(client.transport, "_session")), "close" - ) as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - "rest", - "grpc", - ] - for transport in transports: - client = FirestoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - - -@pytest.mark.parametrize( - "client_class,transport_class", - [ - (FirestoreAdminClient, transports.FirestoreAdminGrpcTransport), - (FirestoreAdminAsyncClient, transports.FirestoreAdminGrpcAsyncIOTransport), - ], -) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/tests/unit/gapic/firestore_v1/__init__.py b/tests/unit/gapic/firestore_v1/__init__.py deleted file mode 100644 index cbf94b283c..0000000000 --- a/tests/unit/gapic/firestore_v1/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/tests/unit/gapic/firestore_v1/test_firestore.py b/tests/unit/gapic/firestore_v1/test_firestore.py deleted file mode 100644 index eac609cab4..0000000000 --- a/tests/unit/gapic/firestore_v1/test_firestore.py +++ /dev/null @@ -1,12782 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os - -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import 
gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.firestore_v1.services.firestore import FirestoreAsyncClient -from google.cloud.firestore_v1.services.firestore import FirestoreClient -from google.cloud.firestore_v1.services.firestore import pagers -from google.cloud.firestore_v1.services.firestore import transports -from google.cloud.firestore_v1.types import aggregation_result -from google.cloud.firestore_v1.types import common -from google.cloud.firestore_v1.types import document -from google.cloud.firestore_v1.types import document as gf_document -from google.cloud.firestore_v1.types import firestore -from google.cloud.firestore_v1.types import query -from google.cloud.firestore_v1.types import query_profile -from google.cloud.firestore_v1.types import write as gf_write -from google.cloud.location import locations_pb2 -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.protobuf import wrappers_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from google.type import latlng_pb2 # type: ignore -import google.auth - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# TODO: use async auth anon 
credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return ( - "foo.googleapis.com" - if ("localhost" in client.DEFAULT_ENDPOINT) - else client.DEFAULT_ENDPOINT - ) - - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return ( - "test.{UNIVERSE_DOMAIN}" - if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) - else client._DEFAULT_ENDPOINT_TEMPLATE - ) - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert FirestoreClient._get_default_mtls_endpoint(None) is None - assert FirestoreClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert ( - FirestoreClient._get_default_mtls_endpoint(api_mtls_endpoint) - == api_mtls_endpoint - ) - assert ( - FirestoreClient._get_default_mtls_endpoint(sandbox_endpoint) - == sandbox_mtls_endpoint - ) - assert ( - FirestoreClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) - == sandbox_mtls_endpoint - ) - assert FirestoreClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -def 
test__read_environment_variables(): - assert FirestoreClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert FirestoreClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert FirestoreClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} - ): - with pytest.raises(ValueError) as excinfo: - FirestoreClient._read_environment_variables() - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert FirestoreClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert FirestoreClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert FirestoreClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - FirestoreClient._read_environment_variables() - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - ) - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert FirestoreClient._read_environment_variables() == ( - False, - "auto", - "foo.com", - ) - - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert FirestoreClient._get_client_cert_source(None, False) is None - assert ( - 
FirestoreClient._get_client_cert_source(mock_provided_cert_source, False) - is None - ) - assert ( - FirestoreClient._get_client_cert_source(mock_provided_cert_source, True) - == mock_provided_cert_source - ) - - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", return_value=True - ): - with mock.patch( - "google.auth.transport.mtls.default_client_cert_source", - return_value=mock_default_cert_source, - ): - assert ( - FirestoreClient._get_client_cert_source(None, True) - is mock_default_cert_source - ) - assert ( - FirestoreClient._get_client_cert_source( - mock_provided_cert_source, "true" - ) - is mock_provided_cert_source - ) - - -@mock.patch.object( - FirestoreClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(FirestoreClient), -) -@mock.patch.object( - FirestoreAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(FirestoreAsyncClient), -) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = FirestoreClient._DEFAULT_UNIVERSE - default_endpoint = FirestoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=default_universe - ) - mock_universe = "bar.com" - mock_endpoint = FirestoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=mock_universe - ) - - assert ( - FirestoreClient._get_api_endpoint( - api_override, mock_client_cert_source, default_universe, "always" - ) - == api_override - ) - assert ( - FirestoreClient._get_api_endpoint( - None, mock_client_cert_source, default_universe, "auto" - ) - == FirestoreClient.DEFAULT_MTLS_ENDPOINT - ) - assert ( - FirestoreClient._get_api_endpoint(None, None, default_universe, "auto") - == default_endpoint - ) - assert ( - FirestoreClient._get_api_endpoint(None, None, default_universe, "always") - == FirestoreClient.DEFAULT_MTLS_ENDPOINT - ) - assert ( - FirestoreClient._get_api_endpoint( - None, mock_client_cert_source, default_universe, "always" - ) - == 
FirestoreClient.DEFAULT_MTLS_ENDPOINT - ) - assert ( - FirestoreClient._get_api_endpoint(None, None, mock_universe, "never") - == mock_endpoint - ) - assert ( - FirestoreClient._get_api_endpoint(None, None, default_universe, "never") - == default_endpoint - ) - - with pytest.raises(MutualTLSChannelError) as excinfo: - FirestoreClient._get_api_endpoint( - None, mock_client_cert_source, mock_universe, "auto" - ) - assert ( - str(excinfo.value) - == "mTLS is not supported in any universe other than googleapis.com." - ) - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert ( - FirestoreClient._get_universe_domain( - client_universe_domain, universe_domain_env - ) - == client_universe_domain - ) - assert ( - FirestoreClient._get_universe_domain(None, universe_domain_env) - == universe_domain_env - ) - assert ( - FirestoreClient._get_universe_domain(None, None) - == FirestoreClient._DEFAULT_UNIVERSE - ) - - with pytest.raises(ValueError) as excinfo: - FirestoreClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
- - -@pytest.mark.parametrize( - "error_code,cred_info_json,show_cred_info", - [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False), - ], -) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = FirestoreClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - - -@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = FirestoreClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - - -@pytest.mark.parametrize( - "client_class,transport_name", - [ - (FirestoreClient, "grpc"), - (FirestoreAsyncClient, "grpc_asyncio"), - (FirestoreClient, "rest"), - ], -) -def test_firestore_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object( - service_account.Credentials, "from_service_account_info" - ) as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 
"firestore.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://firestore.googleapis.com" - ) - - -@pytest.mark.parametrize( - "transport_class,transport_name", - [ - (transports.FirestoreGrpcTransport, "grpc"), - (transports.FirestoreGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.FirestoreRestTransport, "rest"), - ], -) -def test_firestore_client_service_account_always_use_jwt( - transport_class, transport_name -): - with mock.patch.object( - service_account.Credentials, "with_always_use_jwt_access", create=True - ) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object( - service_account.Credentials, "with_always_use_jwt_access", create=True - ) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize( - "client_class,transport_name", - [ - (FirestoreClient, "grpc"), - (FirestoreAsyncClient, "grpc_asyncio"), - (FirestoreClient, "rest"), - ], -) -def test_firestore_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object( - service_account.Credentials, "from_service_account_file" - ) as factory: - factory.return_value = creds - client = client_class.from_service_account_file( - "dummy/file/path.json", transport=transport_name - ) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json( - "dummy/file/path.json", transport=transport_name - ) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - "firestore.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else 
"https://firestore.googleapis.com" - ) - - -def test_firestore_client_get_transport_class(): - transport = FirestoreClient.get_transport_class() - available_transports = [ - transports.FirestoreGrpcTransport, - transports.FirestoreRestTransport, - ] - assert transport in available_transports - - transport = FirestoreClient.get_transport_class("grpc") - assert transport == transports.FirestoreGrpcTransport - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (FirestoreClient, transports.FirestoreGrpcTransport, "grpc"), - ( - FirestoreAsyncClient, - transports.FirestoreGrpcAsyncIOTransport, - "grpc_asyncio", - ), - (FirestoreClient, transports.FirestoreRestTransport, "rest"), - ], -) -@mock.patch.object( - FirestoreClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(FirestoreClient), -) -@mock.patch.object( - FirestoreAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(FirestoreAsyncClient), -) -def test_firestore_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(FirestoreClient, "get_transport_class") as gtc: - transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(FirestoreClient, "get_transport_class") as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. 
- options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - ) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} - ): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions( - api_audience="https://language.googleapis.com" - ) - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - 
always_use_jwt_access=True, - api_audience="https://language.googleapis.com", - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,use_client_cert_env", - [ - (FirestoreClient, transports.FirestoreGrpcTransport, "grpc", "true"), - ( - FirestoreAsyncClient, - transports.FirestoreGrpcAsyncIOTransport, - "grpc_asyncio", - "true", - ), - (FirestoreClient, transports.FirestoreGrpcTransport, "grpc", "false"), - ( - FirestoreAsyncClient, - transports.FirestoreGrpcAsyncIOTransport, - "grpc_asyncio", - "false", - ), - (FirestoreClient, transports.FirestoreRestTransport, "rest", "true"), - (FirestoreClient, transports.FirestoreRestTransport, "rest", "false"), - ], -) -@mock.patch.object( - FirestoreClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(FirestoreClient), -) -@mock.patch.object( - FirestoreAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(FirestoreAsyncClient), -) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_firestore_client_mtls_env_auto( - client_class, transport_class, transport_name, use_client_cert_env -): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - options = client_options.ClientOptions( - client_cert_source=client_cert_source_callback - ) - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=True, - ): - with mock.patch( - "google.auth.transport.mtls.default_client_cert_source", - return_value=client_cert_source_callback, - ): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=False, - ): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [FirestoreClient, FirestoreAsyncClient]) -@mock.patch.object( - FirestoreClient, "DEFAULT_ENDPOINT", modify_default_endpoint(FirestoreClient) -) -@mock.patch.object( - FirestoreAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(FirestoreAsyncClient), -) -def test_firestore_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint - ) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( - options - ) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint - ) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( - options - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=False, - ): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=True, - ): - with mock.patch( - "google.auth.transport.mtls.default_client_cert_source", - return_value=mock_client_cert_source, - ): - ( - api_endpoint, - cert_source, - ) = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - ) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} - ): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - - -@pytest.mark.parametrize("client_class", [FirestoreClient, FirestoreAsyncClient]) -@mock.patch.object( - FirestoreClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(FirestoreClient), -) -@mock.patch.object( - FirestoreAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(FirestoreAsyncClient), -) -def test_firestore_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = FirestoreClient._DEFAULT_UNIVERSE - default_endpoint = FirestoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=default_universe - ) - mock_universe = "bar.com" - mock_endpoint = FirestoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=mock_universe - ) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ): - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=api_override - ) - client = client_class( - client_options=options, - credentials=ga_credentials.AnonymousCredentials(), - ) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - else: - client = client_class( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - assert client.api_endpoint == ( - mock_endpoint if universe_exists else default_endpoint - ) - assert client.universe_domain == ( - mock_universe if universe_exists else default_universe - ) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
- options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (FirestoreClient, transports.FirestoreGrpcTransport, "grpc"), - ( - FirestoreAsyncClient, - transports.FirestoreGrpcAsyncIOTransport, - "grpc_asyncio", - ), - (FirestoreClient, transports.FirestoreRestTransport, "rest"), - ], -) -def test_firestore_client_client_options_scopes( - client_class, transport_class, transport_name -): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - (FirestoreClient, transports.FirestoreGrpcTransport, "grpc", grpc_helpers), - ( - FirestoreAsyncClient, - transports.FirestoreGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - (FirestoreClient, transports.FirestoreRestTransport, "rest", None), - ], -) -def test_firestore_client_client_options_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -def test_firestore_client_client_options_from_dict(): - with mock.patch( - "google.cloud.firestore_v1.services.firestore.transports.FirestoreGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = FirestoreClient(client_options={"api_endpoint": "squid.clam.whelk"}) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - (FirestoreClient, transports.FirestoreGrpcTransport, "grpc", grpc_helpers), - ( - FirestoreAsyncClient, - transports.FirestoreGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - ], -) -def test_firestore_client_create_channel_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "firestore.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", - ), - scopes=None, - default_host="firestore.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.GetDocumentRequest, - dict, - ], -) -def test_get_document(request_type, transport: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime 
is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_document), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = document.Document( - name="name_value", - ) - response = client.get_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore.GetDocumentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, document.Document) - assert response.name == "name_value" - - -def test_get_document_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore.GetDocumentRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_document), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_document(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.GetDocumentRequest( - name="name_value", - ) - - -def test_get_document_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_document in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_document] = mock_rpc - request = {} - client.get_document(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_document(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_get_document_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.get_document - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_document - ] = mock_rpc - - request = {} - await client.get_document(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_document(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_get_document_async( - transport: str = "grpc_asyncio", request_type=firestore.GetDocumentRequest -): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_document), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - document.Document( - name="name_value", - ) - ) - response = await client.get_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore.GetDocumentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, document.Document) - assert response.name == "name_value" - - -@pytest.mark.asyncio -async def test_get_document_async_from_dict(): - await test_get_document_async(request_type=dict) - - -def test_get_document_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.GetDocumentRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_document), "__call__") as call: - call.return_value = document.Document() - client.get_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_document_field_headers_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = firestore.GetDocumentRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_document), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document.Document()) - await client.get_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.ListDocumentsRequest, - dict, - ], -) -def test_list_documents(request_type, transport: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_documents), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.ListDocumentsResponse( - next_page_token="next_page_token_value", - ) - response = client.list_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore.ListDocumentsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDocumentsPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_documents_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore.ListDocumentsRequest( - parent="parent_value", - collection_id="collection_id_value", - page_token="page_token_value", - order_by="order_by_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_documents), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.list_documents(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.ListDocumentsRequest( - parent="parent_value", - collection_id="collection_id_value", - page_token="page_token_value", - order_by="order_by_value", - ) - - -def test_list_documents_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_documents in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.list_documents] = mock_rpc - request = {} - client.list_documents(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_documents(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_list_documents_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.list_documents - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_documents - ] = mock_rpc - - request = {} - await client.list_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_documents(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_list_documents_async( - transport: str = "grpc_asyncio", request_type=firestore.ListDocumentsRequest -): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_documents), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.ListDocumentsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore.ListDocumentsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDocumentsAsyncPager) - assert response.next_page_token == "next_page_token_value" - - -@pytest.mark.asyncio -async def test_list_documents_async_from_dict(): - await test_list_documents_async(request_type=dict) - - -def test_list_documents_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.ListDocumentsRequest() - - request.parent = "parent_value" - request.collection_id = "collection_id_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_documents), "__call__") as call: - call.return_value = firestore.ListDocumentsResponse() - client.list_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value&collection_id=collection_id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_documents_field_headers_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.ListDocumentsRequest() - - request.parent = "parent_value" - request.collection_id = "collection_id_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_documents), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.ListDocumentsResponse() - ) - await client.list_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value&collection_id=collection_id_value", - ) in kw["metadata"] - - -def test_list_documents_pager(transport_name: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_documents), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - document.Document(), - ], - next_page_token="abc", - ), - firestore.ListDocumentsResponse( - documents=[], - next_page_token="def", - ), - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - ], - next_page_token="ghi", - ), - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - ( - ("parent", ""), - ("collection_id", ""), - ) - ), - ) - pager = client.list_documents(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, document.Document) for i in results) - - -def test_list_documents_pages(transport_name: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_documents), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - document.Document(), - ], - next_page_token="abc", - ), - firestore.ListDocumentsResponse( - documents=[], - next_page_token="def", - ), - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - ], - next_page_token="ghi", - ), - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - ], - ), - RuntimeError, - ) - pages = list(client.list_documents(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_documents_async_pager(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_documents), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - document.Document(), - ], - next_page_token="abc", - ), - firestore.ListDocumentsResponse( - documents=[], - next_page_token="def", - ), - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - ], - next_page_token="ghi", - ), - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_documents( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, document.Document) for i in responses) - - -@pytest.mark.asyncio -async def test_list_documents_async_pages(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_documents), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - document.Document(), - ], - next_page_token="abc", - ), - firestore.ListDocumentsResponse( - documents=[], - next_page_token="def", - ), - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - ], - next_page_token="ghi", - ), - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_documents(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.UpdateDocumentRequest, - dict, - ], -) -def test_update_document(request_type, transport: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_document), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = gf_document.Document( - name="name_value", - ) - response = client.update_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore.UpdateDocumentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gf_document.Document) - assert response.name == "name_value" - - -def test_update_document_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore.UpdateDocumentRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_document), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_document(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.UpdateDocumentRequest() - - -def test_update_document_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_document in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client._transport._wrapped_methods[client._transport.update_document] = mock_rpc - request = {} - client.update_document(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_document(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_update_document_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.update_document - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.update_document - ] = mock_rpc - - request = {} - await client.update_document(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.update_document(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_update_document_async( - transport: str = "grpc_asyncio", request_type=firestore.UpdateDocumentRequest -): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_document), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gf_document.Document( - name="name_value", - ) - ) - response = await client.update_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore.UpdateDocumentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gf_document.Document) - assert response.name == "name_value" - - -@pytest.mark.asyncio -async def test_update_document_async_from_dict(): - await test_update_document_async(request_type=dict) - - -def test_update_document_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.UpdateDocumentRequest() - - request.document.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_document), "__call__") as call: - call.return_value = gf_document.Document() - client.update_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "document.name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_update_document_field_headers_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.UpdateDocumentRequest() - - request.document.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_document), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gf_document.Document() - ) - await client.update_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "document.name=name_value", - ) in kw["metadata"] - - -def test_update_document_flattened(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_document), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = gf_document.Document() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.update_document( - document=gf_document.Document(name="name_value"), - update_mask=common.DocumentMask(field_paths=["field_paths_value"]), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].document - mock_val = gf_document.Document(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = common.DocumentMask(field_paths=["field_paths_value"]) - assert arg == mock_val - - -def test_update_document_flattened_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_document( - firestore.UpdateDocumentRequest(), - document=gf_document.Document(name="name_value"), - update_mask=common.DocumentMask(field_paths=["field_paths_value"]), - ) - - -@pytest.mark.asyncio -async def test_update_document_flattened_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_document), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = gf_document.Document() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gf_document.Document() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_document( - document=gf_document.Document(name="name_value"), - update_mask=common.DocumentMask(field_paths=["field_paths_value"]), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].document - mock_val = gf_document.Document(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = common.DocumentMask(field_paths=["field_paths_value"]) - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_document_flattened_error_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_document( - firestore.UpdateDocumentRequest(), - document=gf_document.Document(name="name_value"), - update_mask=common.DocumentMask(field_paths=["field_paths_value"]), - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.DeleteDocumentRequest, - dict, - ], -) -def test_delete_document(request_type, transport: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_document), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore.DeleteDocumentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_document_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore.DeleteDocumentRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_document), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_document(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.DeleteDocumentRequest( - name="name_value", - ) - - -def test_delete_document_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_document in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client._transport._wrapped_methods[client._transport.delete_document] = mock_rpc - request = {} - client.delete_document(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_document(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_delete_document_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.delete_document - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_document - ] = mock_rpc - - request = {} - await client.delete_document(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.delete_document(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_delete_document_async( - transport: str = "grpc_asyncio", request_type=firestore.DeleteDocumentRequest -): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_document), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore.DeleteDocumentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_document_async_from_dict(): - await test_delete_document_async(request_type=dict) - - -def test_delete_document_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.DeleteDocumentRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_document), "__call__") as call: - call.return_value = None - client.delete_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_delete_document_field_headers_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.DeleteDocumentRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_document), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_delete_document_flattened(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_document), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete_document( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_delete_document_flattened_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_document( - firestore.DeleteDocumentRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_delete_document_flattened_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_document), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_document( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_delete_document_flattened_error_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_document( - firestore.DeleteDocumentRequest(), - name="name_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.BatchGetDocumentsRequest, - dict, - ], -) -def test_batch_get_documents(request_type, transport: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_get_documents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iter([firestore.BatchGetDocumentsResponse()]) - response = client.batch_get_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore.BatchGetDocumentsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - for message in response: - assert isinstance(message, firestore.BatchGetDocumentsResponse) - - -def test_batch_get_documents_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore.BatchGetDocumentsRequest( - database="database_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.batch_get_documents), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.batch_get_documents(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.BatchGetDocumentsRequest( - database="database_value", - ) - - -def test_batch_get_documents_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.batch_get_documents in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.batch_get_documents - ] = mock_rpc - request = {} - client.batch_get_documents(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.batch_get_documents(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_batch_get_documents_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.batch_get_documents - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.batch_get_documents - ] = mock_rpc - - request = {} - await client.batch_get_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.batch_get_documents(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_batch_get_documents_async( - transport: str = "grpc_asyncio", request_type=firestore.BatchGetDocumentsRequest -): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.batch_get_documents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock( - side_effect=[firestore.BatchGetDocumentsResponse()] - ) - response = await client.batch_get_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore.BatchGetDocumentsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - message = await response.read() - assert isinstance(message, firestore.BatchGetDocumentsResponse) - - -@pytest.mark.asyncio -async def test_batch_get_documents_async_from_dict(): - await test_batch_get_documents_async(request_type=dict) - - -def test_batch_get_documents_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.BatchGetDocumentsRequest() - - request.database = "database_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_get_documents), "__call__" - ) as call: - call.return_value = iter([firestore.BatchGetDocumentsResponse()]) - client.batch_get_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "database=database_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_batch_get_documents_field_headers_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.BatchGetDocumentsRequest() - - request.database = "database_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_get_documents), "__call__" - ) as call: - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock( - side_effect=[firestore.BatchGetDocumentsResponse()] - ) - await client.batch_get_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "database=database_value", - ) in kw["metadata"] - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.BeginTransactionRequest, - dict, - ], -) -def test_begin_transaction(request_type, transport: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = firestore.BeginTransactionResponse( - transaction=b"transaction_blob", - ) - response = client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore.BeginTransactionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore.BeginTransactionResponse) - assert response.transaction == b"transaction_blob" - - -def test_begin_transaction_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore.BeginTransactionRequest( - database="database_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.begin_transaction(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.BeginTransactionRequest( - database="database_value", - ) - - -def test_begin_transaction_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.begin_transaction in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.begin_transaction - ] = mock_rpc - request = {} - client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.begin_transaction(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_begin_transaction_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.begin_transaction - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.begin_transaction - ] = mock_rpc - - request = {} - await client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.begin_transaction(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_begin_transaction_async( - transport: str = "grpc_asyncio", request_type=firestore.BeginTransactionRequest -): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.begin_transaction), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.BeginTransactionResponse( - transaction=b"transaction_blob", - ) - ) - response = await client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore.BeginTransactionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore.BeginTransactionResponse) - assert response.transaction == b"transaction_blob" - - -@pytest.mark.asyncio -async def test_begin_transaction_async_from_dict(): - await test_begin_transaction_async(request_type=dict) - - -def test_begin_transaction_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.BeginTransactionRequest() - - request.database = "database_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), "__call__" - ) as call: - call.return_value = firestore.BeginTransactionResponse() - client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "database=database_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_begin_transaction_field_headers_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.BeginTransactionRequest() - - request.database = "database_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.BeginTransactionResponse() - ) - await client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "database=database_value", - ) in kw["metadata"] - - -def test_begin_transaction_flattened(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.BeginTransactionResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.begin_transaction( - database="database_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = "database_value" - assert arg == mock_val - - -def test_begin_transaction_flattened_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.begin_transaction( - firestore.BeginTransactionRequest(), - database="database_value", - ) - - -@pytest.mark.asyncio -async def test_begin_transaction_flattened_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.BeginTransactionResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.BeginTransactionResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.begin_transaction( - database="database_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = "database_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_begin_transaction_flattened_error_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.begin_transaction( - firestore.BeginTransactionRequest(), - database="database_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.CommitRequest, - dict, - ], -) -def test_commit(request_type, transport: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.commit), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.CommitResponse() - response = client.commit(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore.CommitRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore.CommitResponse) - - -def test_commit_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore.CommitRequest( - database="database_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.commit), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.commit(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.CommitRequest( - database="database_value", - ) - - -def test_commit_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.commit in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.commit] = mock_rpc - request = {} - client.commit(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.commit(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_commit_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.commit - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.commit - ] = mock_rpc - - request = {} - await client.commit(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.commit(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_commit_async( - transport: str = "grpc_asyncio", request_type=firestore.CommitRequest -): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.commit), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.CommitResponse() - ) - response = await client.commit(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore.CommitRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore.CommitResponse) - - -@pytest.mark.asyncio -async def test_commit_async_from_dict(): - await test_commit_async(request_type=dict) - - -def test_commit_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.CommitRequest() - - request.database = "database_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.commit), "__call__") as call: - call.return_value = firestore.CommitResponse() - client.commit(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "database=database_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_commit_field_headers_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.CommitRequest() - - request.database = "database_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.commit), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.CommitResponse() - ) - await client.commit(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "database=database_value", - ) in kw["metadata"] - - -def test_commit_flattened(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.commit), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.CommitResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.commit( - database="database_value", - writes=[gf_write.Write(update=document.Document(name="name_value"))], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = "database_value" - assert arg == mock_val - arg = args[0].writes - mock_val = [gf_write.Write(update=document.Document(name="name_value"))] - assert arg == mock_val - - -def test_commit_flattened_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.commit( - firestore.CommitRequest(), - database="database_value", - writes=[gf_write.Write(update=document.Document(name="name_value"))], - ) - - -@pytest.mark.asyncio -async def test_commit_flattened_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.commit), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.CommitResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.CommitResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.commit( - database="database_value", - writes=[gf_write.Write(update=document.Document(name="name_value"))], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = "database_value" - assert arg == mock_val - arg = args[0].writes - mock_val = [gf_write.Write(update=document.Document(name="name_value"))] - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_commit_flattened_error_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.commit( - firestore.CommitRequest(), - database="database_value", - writes=[gf_write.Write(update=document.Document(name="name_value"))], - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.RollbackRequest, - dict, - ], -) -def test_rollback(request_type, transport: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.rollback), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.rollback(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore.RollbackRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_rollback_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore.RollbackRequest( - database="database_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.rollback), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.rollback(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.RollbackRequest( - database="database_value", - ) - - -def test_rollback_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.rollback in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.rollback] = mock_rpc - request = {} - client.rollback(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.rollback(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_rollback_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.rollback - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.rollback - ] = mock_rpc - - request = {} - await client.rollback(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.rollback(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_rollback_async( - transport: str = "grpc_asyncio", request_type=firestore.RollbackRequest -): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.rollback), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.rollback(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore.RollbackRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_rollback_async_from_dict(): - await test_rollback_async(request_type=dict) - - -def test_rollback_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.RollbackRequest() - - request.database = "database_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.rollback), "__call__") as call: - call.return_value = None - client.rollback(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "database=database_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_rollback_field_headers_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.RollbackRequest() - - request.database = "database_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.rollback), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.rollback(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "database=database_value", - ) in kw["metadata"] - - -def test_rollback_flattened(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.rollback), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.rollback( - database="database_value", - transaction=b"transaction_blob", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = "database_value" - assert arg == mock_val - arg = args[0].transaction - mock_val = b"transaction_blob" - assert arg == mock_val - - -def test_rollback_flattened_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.rollback( - firestore.RollbackRequest(), - database="database_value", - transaction=b"transaction_blob", - ) - - -@pytest.mark.asyncio -async def test_rollback_flattened_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.rollback), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.rollback( - database="database_value", - transaction=b"transaction_blob", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = "database_value" - assert arg == mock_val - arg = args[0].transaction - mock_val = b"transaction_blob" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_rollback_flattened_error_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.rollback( - firestore.RollbackRequest(), - database="database_value", - transaction=b"transaction_blob", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.RunQueryRequest, - dict, - ], -) -def test_run_query(request_type, transport: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.run_query), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = iter([firestore.RunQueryResponse()]) - response = client.run_query(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore.RunQueryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - for message in response: - assert isinstance(message, firestore.RunQueryResponse) - - -def test_run_query_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore.RunQueryRequest( - parent="parent_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.run_query), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.run_query(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.RunQueryRequest( - parent="parent_value", - ) - - -def test_run_query_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.run_query in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.run_query] = mock_rpc - request = {} - client.run_query(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.run_query(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_run_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.run_query - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.run_query - ] = mock_rpc - - request = {} - await client.run_query(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.run_query(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_run_query_async( - transport: str = "grpc_asyncio", request_type=firestore.RunQueryRequest -): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.run_query), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock( - side_effect=[firestore.RunQueryResponse()] - ) - response = await client.run_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore.RunQueryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - message = await response.read() - assert isinstance(message, firestore.RunQueryResponse) - - -@pytest.mark.asyncio -async def test_run_query_async_from_dict(): - await test_run_query_async(request_type=dict) - - -def test_run_query_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.RunQueryRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.run_query), "__call__") as call: - call.return_value = iter([firestore.RunQueryResponse()]) - client.run_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_run_query_field_headers_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = firestore.RunQueryRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.run_query), "__call__") as call: - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock( - side_effect=[firestore.RunQueryResponse()] - ) - await client.run_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.RunAggregationQueryRequest, - dict, - ], -) -def test_run_aggregation_query(request_type, transport: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_aggregation_query), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iter([firestore.RunAggregationQueryResponse()]) - response = client.run_aggregation_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore.RunAggregationQueryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- for message in response: - assert isinstance(message, firestore.RunAggregationQueryResponse) - - -def test_run_aggregation_query_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore.RunAggregationQueryRequest( - parent="parent_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_aggregation_query), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.run_aggregation_query(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.RunAggregationQueryRequest( - parent="parent_value", - ) - - -def test_run_aggregation_query_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.run_aggregation_query - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client._transport._wrapped_methods[ - client._transport.run_aggregation_query - ] = mock_rpc - request = {} - client.run_aggregation_query(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.run_aggregation_query(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_run_aggregation_query_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.run_aggregation_query - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.run_aggregation_query - ] = mock_rpc - - request = {} - await client.run_aggregation_query(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.run_aggregation_query(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_run_aggregation_query_async( - transport: str = "grpc_asyncio", request_type=firestore.RunAggregationQueryRequest -): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_aggregation_query), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock( - side_effect=[firestore.RunAggregationQueryResponse()] - ) - response = await client.run_aggregation_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore.RunAggregationQueryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - message = await response.read() - assert isinstance(message, firestore.RunAggregationQueryResponse) - - -@pytest.mark.asyncio -async def test_run_aggregation_query_async_from_dict(): - await test_run_aggregation_query_async(request_type=dict) - - -def test_run_aggregation_query_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = firestore.RunAggregationQueryRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_aggregation_query), "__call__" - ) as call: - call.return_value = iter([firestore.RunAggregationQueryResponse()]) - client.run_aggregation_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_run_aggregation_query_field_headers_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.RunAggregationQueryRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_aggregation_query), "__call__" - ) as call: - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock( - side_effect=[firestore.RunAggregationQueryResponse()] - ) - await client.run_aggregation_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.PartitionQueryRequest, - dict, - ], -) -def test_partition_query(request_type, transport: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.partition_query), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.PartitionQueryResponse( - next_page_token="next_page_token_value", - ) - response = client.partition_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore.PartitionQueryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.PartitionQueryPager) - assert response.next_page_token == "next_page_token_value" - - -def test_partition_query_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = firestore.PartitionQueryRequest( - parent="parent_value", - page_token="page_token_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.partition_query), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.partition_query(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.PartitionQueryRequest( - parent="parent_value", - page_token="page_token_value", - ) - - -def test_partition_query_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.partition_query in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.partition_query] = mock_rpc - request = {} - client.partition_query(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.partition_query(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_partition_query_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.partition_query - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.partition_query - ] = mock_rpc - - request = {} - await client.partition_query(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.partition_query(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_partition_query_async( - transport: str = "grpc_asyncio", request_type=firestore.PartitionQueryRequest -): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.partition_query), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.PartitionQueryResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.partition_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore.PartitionQueryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.PartitionQueryAsyncPager) - assert response.next_page_token == "next_page_token_value" - - -@pytest.mark.asyncio -async def test_partition_query_async_from_dict(): - await test_partition_query_async(request_type=dict) - - -def test_partition_query_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.PartitionQueryRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.partition_query), "__call__") as call: - call.return_value = firestore.PartitionQueryResponse() - client.partition_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_partition_query_field_headers_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.PartitionQueryRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.partition_query), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.PartitionQueryResponse() - ) - await client.partition_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -def test_partition_query_pager(transport_name: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.partition_query), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - firestore.PartitionQueryResponse( - partitions=[ - query.Cursor(), - query.Cursor(), - query.Cursor(), - ], - next_page_token="abc", - ), - firestore.PartitionQueryResponse( - partitions=[], - next_page_token="def", - ), - firestore.PartitionQueryResponse( - partitions=[ - query.Cursor(), - ], - next_page_token="ghi", - ), - firestore.PartitionQueryResponse( - partitions=[ - query.Cursor(), - query.Cursor(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.partition_query(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, query.Cursor) for i in results) - - -def test_partition_query_pages(transport_name: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.partition_query), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - firestore.PartitionQueryResponse( - partitions=[ - query.Cursor(), - query.Cursor(), - query.Cursor(), - ], - next_page_token="abc", - ), - firestore.PartitionQueryResponse( - partitions=[], - next_page_token="def", - ), - firestore.PartitionQueryResponse( - partitions=[ - query.Cursor(), - ], - next_page_token="ghi", - ), - firestore.PartitionQueryResponse( - partitions=[ - query.Cursor(), - query.Cursor(), - ], - ), - RuntimeError, - ) - pages = list(client.partition_query(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_partition_query_async_pager(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_query), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - firestore.PartitionQueryResponse( - partitions=[ - query.Cursor(), - query.Cursor(), - query.Cursor(), - ], - next_page_token="abc", - ), - firestore.PartitionQueryResponse( - partitions=[], - next_page_token="def", - ), - firestore.PartitionQueryResponse( - partitions=[ - query.Cursor(), - ], - next_page_token="ghi", - ), - firestore.PartitionQueryResponse( - partitions=[ - query.Cursor(), - query.Cursor(), - ], - ), - RuntimeError, - ) - async_pager = await client.partition_query( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, query.Cursor) for i in responses) - - -@pytest.mark.asyncio -async def test_partition_query_async_pages(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_query), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - firestore.PartitionQueryResponse( - partitions=[ - query.Cursor(), - query.Cursor(), - query.Cursor(), - ], - next_page_token="abc", - ), - firestore.PartitionQueryResponse( - partitions=[], - next_page_token="def", - ), - firestore.PartitionQueryResponse( - partitions=[ - query.Cursor(), - ], - next_page_token="ghi", - ), - firestore.PartitionQueryResponse( - partitions=[ - query.Cursor(), - query.Cursor(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.partition_query(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.WriteRequest, - dict, - ], -) -def test_write(request_type, transport: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - requests = [request] - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.write), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = iter([firestore.WriteResponse()]) - response = client.write(iter(requests)) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert next(args[0]) == request - - # Establish that the response is the type that we expect. 
- for message in response: - assert isinstance(message, firestore.WriteResponse) - - -def test_write_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.write in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.write] = mock_rpc - request = [{}] - client.write(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.write(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_write_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.write - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.write - ] = mock_rpc - - request = [{}] - await client.write(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.write(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_write_async( - transport: str = "grpc_asyncio", request_type=firestore.WriteRequest -): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - requests = [request] - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.write), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[firestore.WriteResponse()]) - response = await client.write(iter(requests)) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert next(args[0]) == request - - # Establish that the response is the type that we expect. - message = await response.read() - assert isinstance(message, firestore.WriteResponse) - - -@pytest.mark.asyncio -async def test_write_async_from_dict(): - await test_write_async(request_type=dict) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.ListenRequest, - dict, - ], -) -def test_listen(request_type, transport: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - requests = [request] - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.listen), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = iter([firestore.ListenResponse()]) - response = client.listen(iter(requests)) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert next(args[0]) == request - - # Establish that the response is the type that we expect. 
- for message in response: - assert isinstance(message, firestore.ListenResponse) - - -def test_listen_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.listen in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.listen] = mock_rpc - request = [{}] - client.listen(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.listen(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_listen_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.listen - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.listen - ] = mock_rpc - - request = [{}] - await client.listen(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.listen(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_listen_async( - transport: str = "grpc_asyncio", request_type=firestore.ListenRequest -): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - requests = [request] - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.listen), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock( - side_effect=[firestore.ListenResponse()] - ) - response = await client.listen(iter(requests)) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert next(args[0]) == request - - # Establish that the response is the type that we expect. - message = await response.read() - assert isinstance(message, firestore.ListenResponse) - - -@pytest.mark.asyncio -async def test_listen_async_from_dict(): - await test_listen_async(request_type=dict) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.ListCollectionIdsRequest, - dict, - ], -) -def test_list_collection_ids(request_type, transport: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_collection_ids), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.ListCollectionIdsResponse( - collection_ids=["collection_ids_value"], - next_page_token="next_page_token_value", - ) - response = client.list_collection_ids(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore.ListCollectionIdsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListCollectionIdsPager) - assert response.collection_ids == ["collection_ids_value"] - assert response.next_page_token == "next_page_token_value" - - -def test_list_collection_ids_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore.ListCollectionIdsRequest( - parent="parent_value", - page_token="page_token_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_collection_ids), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.list_collection_ids(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.ListCollectionIdsRequest( - parent="parent_value", - page_token="page_token_value", - ) - - -def test_list_collection_ids_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.list_collection_ids in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.list_collection_ids - ] = mock_rpc - request = {} - client.list_collection_ids(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_collection_ids(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_list_collection_ids_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.list_collection_ids - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_collection_ids - ] = mock_rpc - - request = {} - await client.list_collection_ids(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_collection_ids(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_list_collection_ids_async( - transport: str = "grpc_asyncio", request_type=firestore.ListCollectionIdsRequest -): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_collection_ids), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.ListCollectionIdsResponse( - collection_ids=["collection_ids_value"], - next_page_token="next_page_token_value", - ) - ) - response = await client.list_collection_ids(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore.ListCollectionIdsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListCollectionIdsAsyncPager) - assert response.collection_ids == ["collection_ids_value"] - assert response.next_page_token == "next_page_token_value" - - -@pytest.mark.asyncio -async def test_list_collection_ids_async_from_dict(): - await test_list_collection_ids_async(request_type=dict) - - -def test_list_collection_ids_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.ListCollectionIdsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_collection_ids), "__call__" - ) as call: - call.return_value = firestore.ListCollectionIdsResponse() - client.list_collection_ids(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_collection_ids_field_headers_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.ListCollectionIdsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_collection_ids), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.ListCollectionIdsResponse() - ) - await client.list_collection_ids(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -def test_list_collection_ids_flattened(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_collection_ids), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.ListCollectionIdsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_collection_ids( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -def test_list_collection_ids_flattened_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_collection_ids( - firestore.ListCollectionIdsRequest(), - parent="parent_value", - ) - - -@pytest.mark.asyncio -async def test_list_collection_ids_flattened_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_collection_ids), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.ListCollectionIdsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.ListCollectionIdsResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_collection_ids( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_list_collection_ids_flattened_error_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_collection_ids( - firestore.ListCollectionIdsRequest(), - parent="parent_value", - ) - - -def test_list_collection_ids_pager(transport_name: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_collection_ids), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - firestore.ListCollectionIdsResponse( - collection_ids=[ - str(), - str(), - str(), - ], - next_page_token="abc", - ), - firestore.ListCollectionIdsResponse( - collection_ids=[], - next_page_token="def", - ), - firestore.ListCollectionIdsResponse( - collection_ids=[ - str(), - ], - next_page_token="ghi", - ), - firestore.ListCollectionIdsResponse( - collection_ids=[ - str(), - str(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_collection_ids(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, str) for i in results) - - -def test_list_collection_ids_pages(transport_name: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_collection_ids), "__call__" - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - firestore.ListCollectionIdsResponse( - collection_ids=[ - str(), - str(), - str(), - ], - next_page_token="abc", - ), - firestore.ListCollectionIdsResponse( - collection_ids=[], - next_page_token="def", - ), - firestore.ListCollectionIdsResponse( - collection_ids=[ - str(), - ], - next_page_token="ghi", - ), - firestore.ListCollectionIdsResponse( - collection_ids=[ - str(), - str(), - ], - ), - RuntimeError, - ) - pages = list(client.list_collection_ids(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_collection_ids_async_pager(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_collection_ids), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - firestore.ListCollectionIdsResponse( - collection_ids=[ - str(), - str(), - str(), - ], - next_page_token="abc", - ), - firestore.ListCollectionIdsResponse( - collection_ids=[], - next_page_token="def", - ), - firestore.ListCollectionIdsResponse( - collection_ids=[ - str(), - ], - next_page_token="ghi", - ), - firestore.ListCollectionIdsResponse( - collection_ids=[ - str(), - str(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_collection_ids( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, str) for i in responses) - - -@pytest.mark.asyncio -async def test_list_collection_ids_async_pages(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_collection_ids), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - firestore.ListCollectionIdsResponse( - collection_ids=[ - str(), - str(), - str(), - ], - next_page_token="abc", - ), - firestore.ListCollectionIdsResponse( - collection_ids=[], - next_page_token="def", - ), - firestore.ListCollectionIdsResponse( - collection_ids=[ - str(), - ], - next_page_token="ghi", - ), - firestore.ListCollectionIdsResponse( - collection_ids=[ - str(), - str(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_collection_ids(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.BatchWriteRequest, - dict, - ], -) -def test_batch_write(request_type, transport: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.batch_write), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.BatchWriteResponse() - response = client.batch_write(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore.BatchWriteRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore.BatchWriteResponse) - - -def test_batch_write_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore.BatchWriteRequest( - database="database_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.batch_write), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.batch_write(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.BatchWriteRequest( - database="database_value", - ) - - -def test_batch_write_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.batch_write in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.batch_write] = mock_rpc - request = {} - client.batch_write(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.batch_write(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_batch_write_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.batch_write - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.batch_write - ] = mock_rpc - - request = {} - await client.batch_write(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.batch_write(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_batch_write_async( - transport: str = "grpc_asyncio", request_type=firestore.BatchWriteRequest -): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.batch_write), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.BatchWriteResponse() - ) - response = await client.batch_write(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore.BatchWriteRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore.BatchWriteResponse) - - -@pytest.mark.asyncio -async def test_batch_write_async_from_dict(): - await test_batch_write_async(request_type=dict) - - -def test_batch_write_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.BatchWriteRequest() - - request.database = "database_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.batch_write), "__call__") as call: - call.return_value = firestore.BatchWriteResponse() - client.batch_write(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "database=database_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_batch_write_field_headers_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = firestore.BatchWriteRequest() - - request.database = "database_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.batch_write), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.BatchWriteResponse() - ) - await client.batch_write(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "database=database_value", - ) in kw["metadata"] - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.CreateDocumentRequest, - dict, - ], -) -def test_create_document(request_type, transport: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_document), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = document.Document( - name="name_value", - ) - response = client.create_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore.CreateDocumentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, document.Document) - assert response.name == "name_value" - - -def test_create_document_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore.CreateDocumentRequest( - parent="parent_value", - collection_id="collection_id_value", - document_id="document_id_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_document), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.create_document(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.CreateDocumentRequest( - parent="parent_value", - collection_id="collection_id_value", - document_id="document_id_value", - ) - - -def test_create_document_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_document in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.create_document] = mock_rpc - request = {} - client.create_document(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_document(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_create_document_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.create_document - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_document - ] = mock_rpc - - request = {} - await client.create_document(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_document(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_create_document_async( - transport: str = "grpc_asyncio", request_type=firestore.CreateDocumentRequest -): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_document), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - document.Document( - name="name_value", - ) - ) - response = await client.create_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore.CreateDocumentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, document.Document) - assert response.name == "name_value" - - -@pytest.mark.asyncio -async def test_create_document_async_from_dict(): - await test_create_document_async(request_type=dict) - - -def test_create_document_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.CreateDocumentRequest() - - request.parent = "parent_value" - request.collection_id = "collection_id_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_document), "__call__") as call: - call.return_value = document.Document() - client.create_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value&collection_id=collection_id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_create_document_field_headers_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.CreateDocumentRequest() - - request.parent = "parent_value" - request.collection_id = "collection_id_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_document), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document.Document()) - await client.create_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value&collection_id=collection_id_value", - ) in kw["metadata"] - - -def test_get_document_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_document in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client._transport._wrapped_methods[client._transport.get_document] = mock_rpc - - request = {} - client.get_document(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_document(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_document_rest_required_fields(request_type=firestore.GetDocumentRequest): - transport_class = transports.FirestoreRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_document._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_document._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "mask", - "read_time", - "transaction", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = document.Document() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_document(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_document_rest_unset_required_fields(): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_document._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "mask", - "readTime", - "transaction", - ) - ) - & set(("name",)) - ) - - -def test_list_documents_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - 
wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_documents in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.list_documents] = mock_rpc - - request = {} - client.list_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_documents(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_documents_rest_required_fields( - request_type=firestore.ListDocumentsRequest, -): - transport_class = transports.FirestoreRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_documents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_documents._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "mask", - "order_by", - "page_size", - "page_token", - "read_time", - "show_missing", - "transaction", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = firestore.ListDocumentsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore.ListDocumentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_documents(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_documents_rest_unset_required_fields(): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_documents._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "mask", - "orderBy", - "pageSize", - "pageToken", - "readTime", - "showMissing", - "transaction", - ) - ) - & set(("parent",)) - ) - - -def test_list_documents_rest_pager(transport: str = "rest"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - document.Document(), - ], - next_page_token="abc", - ), - firestore.ListDocumentsResponse( - documents=[], - next_page_token="def", - ), - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - ], - next_page_token="ghi", - ), - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(firestore.ListDocumentsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/databases/sample2/documents/sample3/sample4", - "collection_id": "sample5", - } - - pager = client.list_documents(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, document.Document) for i in results) - - pages = list(client.list_documents(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_update_document_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been 
cached - assert client._transport.update_document in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.update_document] = mock_rpc - - request = {} - client.update_document(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_document(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_document_rest_required_fields( - request_type=firestore.UpdateDocumentRequest, -): - transport_class = transports.FirestoreRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_document._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_document._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "current_document", - "mask", - "update_mask", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = gf_document.Document() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gf_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_document(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_document_rest_unset_required_fields(): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = 
transport.update_document._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "currentDocument", - "mask", - "updateMask", - ) - ) - & set(("document",)) - ) - - -def test_update_document_rest_flattened(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = gf_document.Document() - - # get arguments that satisfy an http rule for this method - sample_request = { - "document": { - "name": "projects/sample1/databases/sample2/documents/sample3/sample4" - } - } - - # get truthy value for each flattened field - mock_args = dict( - document=gf_document.Document(name="name_value"), - update_mask=common.DocumentMask(field_paths=["field_paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gf_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_document(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{document.name=projects/*/databases/*/documents/*/**}" - % client.transport._host, - args[1], - ) - - -def test_update_document_rest_flattened_error(transport: str = "rest"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_document( - firestore.UpdateDocumentRequest(), - document=gf_document.Document(name="name_value"), - update_mask=common.DocumentMask(field_paths=["field_paths_value"]), - ) - - -def test_delete_document_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_document in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.delete_document] = mock_rpc - - request = {} - client.delete_document(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_document(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_document_rest_required_fields( - request_type=firestore.DeleteDocumentRequest, -): - transport_class = transports.FirestoreRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_document._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_document._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("current_document",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_document(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_document_rest_unset_required_fields(): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.delete_document._get_unset_required_fields({}) - assert set(unset_fields) == (set(("currentDocument",)) & set(("name",))) - - -def test_delete_document_rest_flattened(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/databases/sample2/documents/sample3/sample4" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_document(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/databases/*/documents/*/**}" - % client.transport._host, - args[1], - ) - - -def test_delete_document_rest_flattened_error(transport: str = "rest"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_document( - firestore.DeleteDocumentRequest(), - name="name_value", - ) - - -def test_batch_get_documents_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.batch_get_documents in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.batch_get_documents - ] = mock_rpc - - request = {} - client.batch_get_documents(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.batch_get_documents(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_batch_get_documents_rest_required_fields( - request_type=firestore.BatchGetDocumentsRequest, -): - transport_class = transports.FirestoreRestTransport - - request_init = {} - request_init["database"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).batch_get_documents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["database"] = "database_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).batch_get_documents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "database" in jsonified_request - assert jsonified_request["database"] == "database_value" - - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = firestore.BatchGetDocumentsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore.BatchGetDocumentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - json_return_value = "[{}]".format(json_return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - with mock.patch.object(response_value, "iter_content") as iter_content: - iter_content.return_value = iter(json_return_value) - response = client.batch_get_documents(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_batch_get_documents_rest_unset_required_fields(): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.batch_get_documents._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("database",))) - - -def test_begin_transaction_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() 
- - # Ensure method has been cached - assert client._transport.begin_transaction in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.begin_transaction - ] = mock_rpc - - request = {} - client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.begin_transaction(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_begin_transaction_rest_required_fields( - request_type=firestore.BeginTransactionRequest, -): - transport_class = transports.FirestoreRestTransport - - request_init = {} - request_init["database"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).begin_transaction._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["database"] = "database_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).begin_transaction._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "database" in jsonified_request - assert jsonified_request["database"] == "database_value" - - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = 
request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = firestore.BeginTransactionResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore.BeginTransactionResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.begin_transaction(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_begin_transaction_rest_unset_required_fields(): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.begin_transaction._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("database",))) - - -def test_begin_transaction_rest_flattened(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a 
response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = firestore.BeginTransactionResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"database": "projects/sample1/databases/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - database="database_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore.BeginTransactionResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.begin_transaction(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{database=projects/*/databases/*}/documents:beginTransaction" - % client.transport._host, - args[1], - ) - - -def test_begin_transaction_rest_flattened_error(transport: str = "rest"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.begin_transaction( - firestore.BeginTransactionRequest(), - database="database_value", - ) - - -def test_commit_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.commit in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.commit] = mock_rpc - - request = {} - client.commit(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.commit(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_commit_rest_required_fields(request_type=firestore.CommitRequest): - transport_class = transports.FirestoreRestTransport - - request_init = {} - request_init["database"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).commit._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["database"] = "database_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).commit._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "database" in jsonified_request - assert jsonified_request["database"] == "database_value" - - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = firestore.CommitResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore.CommitResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.commit(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_commit_rest_unset_required_fields(): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.commit._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("database",))) - - -def test_commit_rest_flattened(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = firestore.CommitResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"database": "projects/sample1/databases/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - database="database_value", - writes=[gf_write.Write(update=document.Document(name="name_value"))], - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore.CommitResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.commit(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{database=projects/*/databases/*}/documents:commit" - % client.transport._host, - args[1], - ) - - -def test_commit_rest_flattened_error(transport: str = "rest"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.commit( - firestore.CommitRequest(), - database="database_value", - writes=[gf_write.Write(update=document.Document(name="name_value"))], - ) - - -def test_rollback_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.rollback in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.rollback] = mock_rpc - - request = {} - client.rollback(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.rollback(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_rollback_rest_required_fields(request_type=firestore.RollbackRequest): - transport_class = transports.FirestoreRestTransport - - request_init = {} - request_init["database"] = "" - request_init["transaction"] = b"" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).rollback._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["database"] = "database_value" - jsonified_request["transaction"] = b"transaction_blob" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).rollback._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "database" in jsonified_request - assert jsonified_request["database"] == "database_value" - assert "transaction" in jsonified_request - assert jsonified_request["transaction"] == b"transaction_blob" - - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.rollback(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_rollback_rest_unset_required_fields(): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.rollback._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "database", - "transaction", - ) - ) - ) - - -def test_rollback_rest_flattened(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {"database": "projects/sample1/databases/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - database="database_value", - transaction=b"transaction_blob", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.rollback(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{database=projects/*/databases/*}/documents:rollback" - % client.transport._host, - args[1], - ) - - -def test_rollback_rest_flattened_error(transport: str = "rest"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.rollback( - firestore.RollbackRequest(), - database="database_value", - transaction=b"transaction_blob", - ) - - -def test_run_query_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.run_query in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.run_query] = mock_rpc - - request = {} - client.run_query(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.run_query(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_run_query_rest_required_fields(request_type=firestore.RunQueryRequest): - transport_class = transports.FirestoreRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).run_query._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).run_query._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = firestore.RunQueryResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore.RunQueryResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - json_return_value = "[{}]".format(json_return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - with mock.patch.object(response_value, "iter_content") as iter_content: - iter_content.return_value = iter(json_return_value) - response = client.run_query(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_run_query_rest_unset_required_fields(): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.run_query._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) - - -def test_run_aggregation_query_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached 
- assert ( - client._transport.run_aggregation_query - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.run_aggregation_query - ] = mock_rpc - - request = {} - client.run_aggregation_query(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.run_aggregation_query(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_run_aggregation_query_rest_required_fields( - request_type=firestore.RunAggregationQueryRequest, -): - transport_class = transports.FirestoreRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).run_aggregation_query._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).run_aggregation_query._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) 
- - # Designate an appropriate value for the returned response. - return_value = firestore.RunAggregationQueryResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore.RunAggregationQueryResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - json_return_value = "[{}]".format(json_return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - with mock.patch.object(response_value, "iter_content") as iter_content: - iter_content.return_value = iter(json_return_value) - response = client.run_aggregation_query(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_run_aggregation_query_rest_unset_required_fields(): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.run_aggregation_query._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) - - -def 
test_partition_query_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.partition_query in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.partition_query] = mock_rpc - - request = {} - client.partition_query(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.partition_query(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_partition_query_rest_required_fields( - request_type=firestore.PartitionQueryRequest, -): - transport_class = transports.FirestoreRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).partition_query._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - 
credentials=ga_credentials.AnonymousCredentials() - ).partition_query._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = firestore.PartitionQueryResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore.PartitionQueryResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.partition_query(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_partition_query_rest_unset_required_fields(): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.partition_query._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) - - -def test_partition_query_rest_pager(transport: str = "rest"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - firestore.PartitionQueryResponse( - partitions=[ - query.Cursor(), - query.Cursor(), - query.Cursor(), - ], - next_page_token="abc", - ), - firestore.PartitionQueryResponse( - partitions=[], - next_page_token="def", - ), - firestore.PartitionQueryResponse( - partitions=[ - query.Cursor(), - ], - next_page_token="ghi", - ), - firestore.PartitionQueryResponse( - partitions=[ - query.Cursor(), - query.Cursor(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(firestore.PartitionQueryResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/databases/sample2/documents"} - - pager = client.partition_query(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, query.Cursor) for i in results) - - pages = list(client.partition_query(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_write_rest_unimplemented(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = firestore.WriteRequest() - requests = [request] - with pytest.raises(NotImplementedError): - client.write(requests) - - -def test_listen_rest_unimplemented(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = firestore.ListenRequest() - requests = [request] - with pytest.raises(NotImplementedError): - client.listen(requests) - - -def 
test_list_collection_ids_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.list_collection_ids in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.list_collection_ids - ] = mock_rpc - - request = {} - client.list_collection_ids(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_collection_ids(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_collection_ids_rest_required_fields( - request_type=firestore.ListCollectionIdsRequest, -): - transport_class = transports.FirestoreRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_collection_ids._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = 
transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_collection_ids._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = firestore.ListCollectionIdsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore.ListCollectionIdsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_collection_ids(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_collection_ids_rest_unset_required_fields(): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_collection_ids._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) - - -def test_list_collection_ids_rest_flattened(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = firestore.ListCollectionIdsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/databases/sample2/documents"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = firestore.ListCollectionIdsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_collection_ids(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/databases/*/documents}:listCollectionIds" - % client.transport._host, - args[1], - ) - - -def test_list_collection_ids_rest_flattened_error(transport: str = "rest"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_collection_ids( - firestore.ListCollectionIdsRequest(), - parent="parent_value", - ) - - -def test_list_collection_ids_rest_pager(transport: str = "rest"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - firestore.ListCollectionIdsResponse( - collection_ids=[ - str(), - str(), - str(), - ], - next_page_token="abc", - ), - firestore.ListCollectionIdsResponse( - collection_ids=[], - next_page_token="def", - ), - firestore.ListCollectionIdsResponse( - collection_ids=[ - str(), - ], - next_page_token="ghi", - ), - firestore.ListCollectionIdsResponse( - collection_ids=[ - str(), - str(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - firestore.ListCollectionIdsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/databases/sample2/documents"} - - pager = client.list_collection_ids(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, str) for i in results) - - pages = list(client.list_collection_ids(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_batch_write_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.batch_write in client._transport._wrapped_methods - - # Replace 
cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.batch_write] = mock_rpc - - request = {} - client.batch_write(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.batch_write(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_batch_write_rest_required_fields(request_type=firestore.BatchWriteRequest): - transport_class = transports.FirestoreRestTransport - - request_init = {} - request_init["database"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).batch_write._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["database"] = "database_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).batch_write._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "database" in jsonified_request - assert jsonified_request["database"] == "database_value" - - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = firestore.BatchWriteResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore.BatchWriteResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.batch_write(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_batch_write_rest_unset_required_fields(): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.batch_write._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("database",))) - - -def test_create_document_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - 
credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_document in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.create_document] = mock_rpc - - request = {} - client.create_document(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_document(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_document_rest_required_fields( - request_type=firestore.CreateDocumentRequest, -): - transport_class = transports.FirestoreRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["collection_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_document._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - jsonified_request["collectionId"] = "collection_id_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_document._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "document_id", - "mask", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "collectionId" in jsonified_request - assert jsonified_request["collectionId"] == "collection_id_value" - - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = document.Document() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_document(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_document_rest_unset_required_fields(): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_document._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "documentId", - "mask", - ) - ) - & set( - ( - "parent", - "collectionId", - "document", - ) - ) - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.FirestoreGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.FirestoreGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = FirestoreClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. 
- transport = transports.FirestoreGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = FirestoreClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = FirestoreClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.FirestoreGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = FirestoreClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.FirestoreGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = FirestoreClient(transport=transport) - assert client.transport is transport - - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.FirestoreGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.FirestoreGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.FirestoreGrpcTransport, - transports.FirestoreGrpcAsyncIOTransport, - transports.FirestoreRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - - -def test_transport_kind_grpc(): - transport = FirestoreClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_document_empty_call_grpc(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_document), "__call__") as call: - call.return_value = document.Document() - client.get_document(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.GetDocumentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_documents_empty_call_grpc(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_documents), "__call__") as call: - call.return_value = firestore.ListDocumentsResponse() - client.list_documents(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.ListDocumentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_document_empty_call_grpc(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.update_document), "__call__") as call: - call.return_value = gf_document.Document() - client.update_document(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.UpdateDocumentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_document_empty_call_grpc(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_document), "__call__") as call: - call.return_value = None - client.delete_document(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.DeleteDocumentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_batch_get_documents_empty_call_grpc(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.batch_get_documents), "__call__" - ) as call: - call.return_value = iter([firestore.BatchGetDocumentsResponse()]) - client.batch_get_documents(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.BatchGetDocumentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_begin_transaction_empty_call_grpc(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), "__call__" - ) as call: - call.return_value = firestore.BeginTransactionResponse() - client.begin_transaction(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.BeginTransactionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_commit_empty_call_grpc(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.commit), "__call__") as call: - call.return_value = firestore.CommitResponse() - client.commit(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.CommitRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_rollback_empty_call_grpc(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.rollback), "__call__") as call: - call.return_value = None - client.rollback(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.RollbackRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_run_query_empty_call_grpc(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.run_query), "__call__") as call: - call.return_value = iter([firestore.RunQueryResponse()]) - client.run_query(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.RunQueryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_run_aggregation_query_empty_call_grpc(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.run_aggregation_query), "__call__" - ) as call: - call.return_value = iter([firestore.RunAggregationQueryResponse()]) - client.run_aggregation_query(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.RunAggregationQueryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_partition_query_empty_call_grpc(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.partition_query), "__call__") as call: - call.return_value = firestore.PartitionQueryResponse() - client.partition_query(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.PartitionQueryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_collection_ids_empty_call_grpc(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_collection_ids), "__call__" - ) as call: - call.return_value = firestore.ListCollectionIdsResponse() - client.list_collection_ids(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.ListCollectionIdsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_batch_write_empty_call_grpc(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.batch_write), "__call__") as call: - call.return_value = firestore.BatchWriteResponse() - client.batch_write(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.BatchWriteRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_document_empty_call_grpc(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.create_document), "__call__") as call: - call.return_value = document.Document() - client.create_document(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.CreateDocumentRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = FirestoreAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_document_empty_call_grpc_asyncio(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_document), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - document.Document( - name="name_value", - ) - ) - await client.get_document(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.GetDocumentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_documents_empty_call_grpc_asyncio(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_documents), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.ListDocumentsResponse( - next_page_token="next_page_token_value", - ) - ) - await client.list_documents(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.ListDocumentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_document_empty_call_grpc_asyncio(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.update_document), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gf_document.Document( - name="name_value", - ) - ) - await client.update_document(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.UpdateDocumentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_document_empty_call_grpc_asyncio(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_document), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_document(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.DeleteDocumentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_batch_get_documents_empty_call_grpc_asyncio(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.batch_get_documents), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock( - side_effect=[firestore.BatchGetDocumentsResponse()] - ) - await client.batch_get_documents(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.BatchGetDocumentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_begin_transaction_empty_call_grpc_asyncio(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.BeginTransactionResponse( - transaction=b"transaction_blob", - ) - ) - await client.begin_transaction(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.BeginTransactionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_commit_empty_call_grpc_asyncio(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.commit), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.CommitResponse() - ) - await client.commit(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.CommitRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_rollback_empty_call_grpc_asyncio(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.rollback), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.rollback(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.RollbackRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_run_query_empty_call_grpc_asyncio(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.run_query), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock( - side_effect=[firestore.RunQueryResponse()] - ) - await client.run_query(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.RunQueryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_run_aggregation_query_empty_call_grpc_asyncio(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.run_aggregation_query), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock( - side_effect=[firestore.RunAggregationQueryResponse()] - ) - await client.run_aggregation_query(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.RunAggregationQueryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_partition_query_empty_call_grpc_asyncio(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.partition_query), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.PartitionQueryResponse( - next_page_token="next_page_token_value", - ) - ) - await client.partition_query(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.PartitionQueryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_collection_ids_empty_call_grpc_asyncio(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_collection_ids), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.ListCollectionIdsResponse( - collection_ids=["collection_ids_value"], - next_page_token="next_page_token_value", - ) - ) - await client.list_collection_ids(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.ListCollectionIdsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_batch_write_empty_call_grpc_asyncio(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.batch_write), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.BatchWriteResponse() - ) - await client.batch_write(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.BatchWriteRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_document_empty_call_grpc_asyncio(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.create_document), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - document.Document( - name="name_value", - ) - ) - await client.create_document(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.CreateDocumentRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = FirestoreClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_get_document_rest_bad_request(request_type=firestore.GetDocumentRequest): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/databases/sample2/documents/sample3/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_document(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.GetDocumentRequest, - dict, - ], -) -def test_get_document_rest_call_success(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/databases/sample2/documents/sample3/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = document.Document( - name="name_value", - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_document(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, document.Document) - assert response.name == "name_value" - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_document_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreRestInterceptor, "post_get_document" - ) as post, mock.patch.object( - transports.FirestoreRestInterceptor, "post_get_document_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreRestInterceptor, "pre_get_document" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore.GetDocumentRequest.pb(firestore.GetDocumentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = document.Document.to_json(document.Document()) - req.return_value.content = return_value - - request = firestore.GetDocumentRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = document.Document() - post_with_metadata.return_value = document.Document(), metadata - - client.get_document( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def 
test_list_documents_rest_bad_request(request_type=firestore.ListDocumentsRequest): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/databases/sample2/documents/sample3/sample4", - "collection_id": "sample5", - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_documents(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.ListDocumentsRequest, - dict, - ], -) -def test_list_documents_rest_call_success(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/databases/sample2/documents/sample3/sample4", - "collection_id": "sample5", - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = firestore.ListDocumentsResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore.ListDocumentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_documents(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDocumentsPager) - assert response.next_page_token == "next_page_token_value" - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_documents_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreRestInterceptor, "post_list_documents" - ) as post, mock.patch.object( - transports.FirestoreRestInterceptor, "post_list_documents_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreRestInterceptor, "pre_list_documents" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore.ListDocumentsRequest.pb(firestore.ListDocumentsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers 
= {"header-1": "value-1", "header-2": "value-2"} - return_value = firestore.ListDocumentsResponse.to_json( - firestore.ListDocumentsResponse() - ) - req.return_value.content = return_value - - request = firestore.ListDocumentsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore.ListDocumentsResponse() - post_with_metadata.return_value = firestore.ListDocumentsResponse(), metadata - - client.list_documents( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_document_rest_bad_request(request_type=firestore.UpdateDocumentRequest): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = { - "document": { - "name": "projects/sample1/databases/sample2/documents/sample3/sample4" - } - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_document(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.UpdateDocumentRequest, - dict, - ], -) -def test_update_document_rest_call_success(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = { - "document": { - "name": "projects/sample1/databases/sample2/documents/sample3/sample4" - } - } - request_init["document"] = { - "name": "projects/sample1/databases/sample2/documents/sample3/sample4", - "fields": {}, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = firestore.UpdateDocumentRequest.meta.fields["document"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["document"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["document"][field])): - del request_init["document"][field][i][subfield] - else: - del 
request_init["document"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = gf_document.Document( - name="name_value", - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gf_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_document(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, gf_document.Document) - assert response.name == "name_value" - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_document_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreRestInterceptor, "post_update_document" - ) as post, mock.patch.object( - transports.FirestoreRestInterceptor, "post_update_document_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreRestInterceptor, "pre_update_document" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore.UpdateDocumentRequest.pb( - 
firestore.UpdateDocumentRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gf_document.Document.to_json(gf_document.Document()) - req.return_value.content = return_value - - request = firestore.UpdateDocumentRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = gf_document.Document() - post_with_metadata.return_value = gf_document.Document(), metadata - - client.update_document( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_document_rest_bad_request(request_type=firestore.DeleteDocumentRequest): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/databases/sample2/documents/sample3/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_document(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.DeleteDocumentRequest, - dict, - ], -) -def test_delete_document_rest_call_success(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/databases/sample2/documents/sample3/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = "" - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_document(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_document_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreRestInterceptor, "pre_delete_document" - ) as pre: - pre.assert_not_called() - pb_message = firestore.DeleteDocumentRequest.pb( - firestore.DeleteDocumentRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = firestore.DeleteDocumentRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_document( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_batch_get_documents_rest_bad_request( - request_type=firestore.BatchGetDocumentsRequest, -): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"database": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.batch_get_documents(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.BatchGetDocumentsRequest, - dict, - ], -) -def test_batch_get_documents_rest_call_success(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"database": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = firestore.BatchGetDocumentsResponse( - transaction=b"transaction_blob", - missing="missing_value", - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore.BatchGetDocumentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - json_return_value = "[{}]".format(json_return_value) - response_value.iter_content = mock.Mock(return_value=iter(json_return_value)) - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.batch_get_documents(request) - - assert isinstance(response, Iterable) - response = next(response) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, firestore.BatchGetDocumentsResponse) - assert response.transaction == b"transaction_blob" - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_batch_get_documents_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreRestInterceptor, "post_batch_get_documents" - ) as post, mock.patch.object( - transports.FirestoreRestInterceptor, "post_batch_get_documents_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreRestInterceptor, "pre_batch_get_documents" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore.BatchGetDocumentsRequest.pb( - firestore.BatchGetDocumentsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = firestore.BatchGetDocumentsResponse.to_json( - firestore.BatchGetDocumentsResponse() - ) - req.return_value.iter_content = mock.Mock(return_value=iter(return_value)) - - request = firestore.BatchGetDocumentsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore.BatchGetDocumentsResponse() - post_with_metadata.return_value = ( - firestore.BatchGetDocumentsResponse(), - metadata, - ) - - client.batch_get_documents( - request, - metadata=[ - ("key", "val"), - 
("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_begin_transaction_rest_bad_request( - request_type=firestore.BeginTransactionRequest, -): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"database": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.begin_transaction(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.BeginTransactionRequest, - dict, - ], -) -def test_begin_transaction_rest_call_success(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"database": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = firestore.BeginTransactionResponse( - transaction=b"transaction_blob", - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore.BeginTransactionResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.begin_transaction(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore.BeginTransactionResponse) - assert response.transaction == b"transaction_blob" - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_begin_transaction_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreRestInterceptor, "post_begin_transaction" - ) as post, mock.patch.object( - transports.FirestoreRestInterceptor, "post_begin_transaction_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreRestInterceptor, "pre_begin_transaction" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore.BeginTransactionRequest.pb( - firestore.BeginTransactionRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = firestore.BeginTransactionResponse.to_json( - firestore.BeginTransactionResponse() - ) - req.return_value.content = return_value - - request = firestore.BeginTransactionRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore.BeginTransactionResponse() - post_with_metadata.return_value = firestore.BeginTransactionResponse(), metadata - - client.begin_transaction( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_commit_rest_bad_request(request_type=firestore.CommitRequest): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"database": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.commit(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.CommitRequest, - dict, - ], -) -def test_commit_rest_call_success(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"database": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = firestore.CommitResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore.CommitResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.commit(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, firestore.CommitResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_commit_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreRestInterceptor, "post_commit" - ) as post, mock.patch.object( - transports.FirestoreRestInterceptor, "post_commit_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreRestInterceptor, "pre_commit" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore.CommitRequest.pb(firestore.CommitRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = firestore.CommitResponse.to_json(firestore.CommitResponse()) - req.return_value.content = return_value - - request = firestore.CommitRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore.CommitResponse() - post_with_metadata.return_value = firestore.CommitResponse(), metadata - - client.commit( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_rollback_rest_bad_request(request_type=firestore.RollbackRequest): - client = FirestoreClient( - 
credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"database": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.rollback(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.RollbackRequest, - dict, - ], -) -def test_rollback_rest_call_success(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"database": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = "" - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.rollback(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_rollback_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreRestInterceptor, "pre_rollback" - ) as pre: - pre.assert_not_called() - pb_message = firestore.RollbackRequest.pb(firestore.RollbackRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = firestore.RollbackRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.rollback( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_run_query_rest_bad_request(request_type=firestore.RunQueryRequest): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/databases/sample2/documents"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.run_query(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.RunQueryRequest, - dict, - ], -) -def test_run_query_rest_call_success(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/databases/sample2/documents"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = firestore.RunQueryResponse( - transaction=b"transaction_blob", - skipped_results=1633, - done=True, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore.RunQueryResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - json_return_value = "[{}]".format(json_return_value) - response_value.iter_content = mock.Mock(return_value=iter(json_return_value)) - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.run_query(request) - - assert isinstance(response, Iterable) - response = next(response) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, firestore.RunQueryResponse) - assert response.transaction == b"transaction_blob" - assert response.skipped_results == 1633 - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_run_query_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreRestInterceptor, "post_run_query" - ) as post, mock.patch.object( - transports.FirestoreRestInterceptor, "post_run_query_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreRestInterceptor, "pre_run_query" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore.RunQueryRequest.pb(firestore.RunQueryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = firestore.RunQueryResponse.to_json(firestore.RunQueryResponse()) - req.return_value.iter_content = mock.Mock(return_value=iter(return_value)) - - request = firestore.RunQueryRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore.RunQueryResponse() - post_with_metadata.return_value = firestore.RunQueryResponse(), metadata - - client.run_query( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - 
post_with_metadata.assert_called_once() - - -def test_run_aggregation_query_rest_bad_request( - request_type=firestore.RunAggregationQueryRequest, -): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/databases/sample2/documents"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.run_aggregation_query(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.RunAggregationQueryRequest, - dict, - ], -) -def test_run_aggregation_query_rest_call_success(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/databases/sample2/documents"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = firestore.RunAggregationQueryResponse( - transaction=b"transaction_blob", - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore.RunAggregationQueryResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - json_return_value = "[{}]".format(json_return_value) - response_value.iter_content = mock.Mock(return_value=iter(json_return_value)) - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.run_aggregation_query(request) - - assert isinstance(response, Iterable) - response = next(response) - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore.RunAggregationQueryResponse) - assert response.transaction == b"transaction_blob" - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_run_aggregation_query_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreRestInterceptor, "post_run_aggregation_query" - ) as post, mock.patch.object( - transports.FirestoreRestInterceptor, "post_run_aggregation_query_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreRestInterceptor, "pre_run_aggregation_query" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore.RunAggregationQueryRequest.pb( - firestore.RunAggregationQueryRequest() - ) - 
transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = firestore.RunAggregationQueryResponse.to_json( - firestore.RunAggregationQueryResponse() - ) - req.return_value.iter_content = mock.Mock(return_value=iter(return_value)) - - request = firestore.RunAggregationQueryRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore.RunAggregationQueryResponse() - post_with_metadata.return_value = ( - firestore.RunAggregationQueryResponse(), - metadata, - ) - - client.run_aggregation_query( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_partition_query_rest_bad_request(request_type=firestore.PartitionQueryRequest): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/databases/sample2/documents"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.partition_query(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.PartitionQueryRequest, - dict, - ], -) -def test_partition_query_rest_call_success(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/databases/sample2/documents"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = firestore.PartitionQueryResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore.PartitionQueryResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.partition_query(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.PartitionQueryPager) - assert response.next_page_token == "next_page_token_value" - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_partition_query_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreRestInterceptor, "post_partition_query" - ) as post, mock.patch.object( - transports.FirestoreRestInterceptor, "post_partition_query_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreRestInterceptor, "pre_partition_query" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore.PartitionQueryRequest.pb( - firestore.PartitionQueryRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = firestore.PartitionQueryResponse.to_json( - firestore.PartitionQueryResponse() - ) - req.return_value.content = return_value - - request = firestore.PartitionQueryRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore.PartitionQueryResponse() - post_with_metadata.return_value = firestore.PartitionQueryResponse(), metadata - - client.partition_query( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - 
post_with_metadata.assert_called_once() - - -def test_write_rest_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - with pytest.raises(NotImplementedError) as not_implemented_error: - client.write({}) - assert "Method Write is not available over REST transport" in str( - not_implemented_error.value - ) - - -def test_listen_rest_error(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - with pytest.raises(NotImplementedError) as not_implemented_error: - client.listen({}) - assert "Method Listen is not available over REST transport" in str( - not_implemented_error.value - ) - - -def test_list_collection_ids_rest_bad_request( - request_type=firestore.ListCollectionIdsRequest, -): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/databases/sample2/documents"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_collection_ids(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.ListCollectionIdsRequest, - dict, - ], -) -def test_list_collection_ids_rest_call_success(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/databases/sample2/documents"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = firestore.ListCollectionIdsResponse( - collection_ids=["collection_ids_value"], - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore.ListCollectionIdsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_collection_ids(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListCollectionIdsPager) - assert response.collection_ids == ["collection_ids_value"] - assert response.next_page_token == "next_page_token_value" - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_collection_ids_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreRestInterceptor, "post_list_collection_ids" - ) as post, mock.patch.object( - transports.FirestoreRestInterceptor, "post_list_collection_ids_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreRestInterceptor, "pre_list_collection_ids" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore.ListCollectionIdsRequest.pb( - firestore.ListCollectionIdsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = firestore.ListCollectionIdsResponse.to_json( - firestore.ListCollectionIdsResponse() - ) - req.return_value.content = return_value - - request = firestore.ListCollectionIdsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore.ListCollectionIdsResponse() - post_with_metadata.return_value = ( - firestore.ListCollectionIdsResponse(), - metadata, - ) - - client.list_collection_ids( - request, - metadata=[ 
- ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_batch_write_rest_bad_request(request_type=firestore.BatchWriteRequest): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"database": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.batch_write(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.BatchWriteRequest, - dict, - ], -) -def test_batch_write_rest_call_success(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"database": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = firestore.BatchWriteResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore.BatchWriteResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.batch_write(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore.BatchWriteResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_batch_write_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreRestInterceptor, "post_batch_write" - ) as post, mock.patch.object( - transports.FirestoreRestInterceptor, "post_batch_write_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreRestInterceptor, "pre_batch_write" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore.BatchWriteRequest.pb(firestore.BatchWriteRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = firestore.BatchWriteResponse.to_json( - 
firestore.BatchWriteResponse() - ) - req.return_value.content = return_value - - request = firestore.BatchWriteRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore.BatchWriteResponse() - post_with_metadata.return_value = firestore.BatchWriteResponse(), metadata - - client.batch_write( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_document_rest_bad_request(request_type=firestore.CreateDocumentRequest): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/databases/sample2/documents/sample3", - "collection_id": "sample4", - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_document(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.CreateDocumentRequest, - dict, - ], -) -def test_create_document_rest_call_success(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/databases/sample2/documents/sample3", - "collection_id": "sample4", - } - request_init["document"] = { - "name": "name_value", - "fields": {}, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = firestore.CreateDocumentRequest.meta.fields["document"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["document"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["document"][field])): - del request_init["document"][field][i][subfield] - else: - del 
request_init["document"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = document.Document( - name="name_value", - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_document(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, document.Document) - assert response.name == "name_value" - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_document_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreRestInterceptor, "post_create_document" - ) as post, mock.patch.object( - transports.FirestoreRestInterceptor, "post_create_document_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreRestInterceptor, "pre_create_document" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore.CreateDocumentRequest.pb( - firestore.CreateDocumentRequest() 
- ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = document.Document.to_json(document.Document()) - req.return_value.content = return_value - - request = firestore.CreateDocumentRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = document.Document() - post_with_metadata.return_value = document.Document(), metadata - - client.create_document( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_cancel_operation_rest_bad_request( - request_type=operations_pb2.CancelOperationRequest, -): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/databases/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.CancelOperationRequest, - dict, - ], -) -def test_cancel_operation_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {"name": "projects/sample1/databases/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = "{}" - response_value.content = json_return_value.encode("UTF-8") - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request( - request_type=operations_pb2.DeleteOperationRequest, -): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/databases/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.DeleteOperationRequest, - dict, - ], -) -def test_delete_operation_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {"name": "projects/sample1/databases/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = "{}" - response_value.content = json_return_value.encode("UTF-8") - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request( - request_type=operations_pb2.GetOperationRequest, -): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/databases/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {"name": "projects/sample1/databases/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request( - request_type=operations_pb2.ListOperationsRequest, -): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/databases/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_operations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.ListOperationsRequest, - dict, - ], -) -def test_list_operations_rest(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {"name": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -def test_initialize_client_w_rest(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_document_empty_call_rest(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_document), "__call__") as call: - client.get_document(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.GetDocumentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_documents_empty_call_rest(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_documents), "__call__") as call: - client.list_documents(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.ListDocumentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_document_empty_call_rest(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.update_document), "__call__") as call: - client.update_document(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.UpdateDocumentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_document_empty_call_rest(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_document), "__call__") as call: - client.delete_document(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.DeleteDocumentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_batch_get_documents_empty_call_rest(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.batch_get_documents), "__call__" - ) as call: - client.batch_get_documents(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.BatchGetDocumentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_begin_transaction_empty_call_rest(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), "__call__" - ) as call: - client.begin_transaction(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.BeginTransactionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_commit_empty_call_rest(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.commit), "__call__") as call: - client.commit(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.CommitRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_rollback_empty_call_rest(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.rollback), "__call__") as call: - client.rollback(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.RollbackRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_run_query_empty_call_rest(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.run_query), "__call__") as call: - client.run_query(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.RunQueryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_run_aggregation_query_empty_call_rest(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.run_aggregation_query), "__call__" - ) as call: - client.run_aggregation_query(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.RunAggregationQueryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_partition_query_empty_call_rest(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.partition_query), "__call__") as call: - client.partition_query(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.PartitionQueryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_collection_ids_empty_call_rest(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_collection_ids), "__call__" - ) as call: - client.list_collection_ids(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.ListCollectionIdsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_batch_write_empty_call_rest(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.batch_write), "__call__") as call: - client.batch_write(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.BatchWriteRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_create_document_empty_call_rest(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.create_document), "__call__") as call: - client.create_document(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.CreateDocumentRequest() - - assert args[0] == request_msg - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.FirestoreGrpcTransport, - ) - - -def test_firestore_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.FirestoreTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) - - -def test_firestore_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.firestore_v1.services.firestore.transports.FirestoreTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.FirestoreTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - "get_document", - "list_documents", - "update_document", - "delete_document", - "batch_get_documents", - "begin_transaction", - "commit", - "rollback", - "run_query", - "run_aggregation_query", - "partition_query", - "write", - "listen", - "list_collection_ids", - "batch_write", - "create_document", - "get_operation", - "cancel_operation", - "delete_operation", - "list_operations", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_firestore_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.firestore_v1.services.firestore.transports.FirestoreTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.FirestoreTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", - ), - quota_project_id="octopus", - ) - - -def test_firestore_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.firestore_v1.services.firestore.transports.FirestoreTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.FirestoreTransport() - adc.assert_called_once() - - -def test_firestore_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - FirestoreClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", - ), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.FirestoreGrpcTransport, - transports.FirestoreGrpcAsyncIOTransport, - ], -) -def test_firestore_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", - ), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.FirestoreGrpcTransport, - transports.FirestoreGrpcAsyncIOTransport, - transports.FirestoreRestTransport, - ], -) -def test_firestore_transport_auth_gdch_credentials(transport_class): - host = "https://language.com" - api_audience_tests = [None, "https://language2.com"] - api_audience_expect = [host, "https://language2.com"] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, "default", autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock( - return_value=gdch_mock - ) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with(e) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.FirestoreGrpcTransport, grpc_helpers), - (transports.FirestoreGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -def test_firestore_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "firestore.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", - ), - scopes=["1", "2"], - default_host="firestore.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class", - [transports.FirestoreGrpcTransport, transports.FirestoreGrpcAsyncIOTransport], -) -def test_firestore_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback, - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, private_key=expected_key - ) - - -def test_firestore_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ) as mock_configure_mtls_channel: - transports.FirestoreRestTransport( - credentials=cred, client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_firestore_host_no_port(transport_name): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="firestore.googleapis.com" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "firestore.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://firestore.googleapis.com" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_firestore_host_with_port(transport_name): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="firestore.googleapis.com:8000" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "firestore.googleapis.com:8000" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://firestore.googleapis.com:8000" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "rest", - ], 
-) -def test_firestore_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = FirestoreClient( - credentials=creds1, - transport=transport_name, - ) - client2 = FirestoreClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.get_document._session - session2 = client2.transport.get_document._session - assert session1 != session2 - session1 = client1.transport.list_documents._session - session2 = client2.transport.list_documents._session - assert session1 != session2 - session1 = client1.transport.update_document._session - session2 = client2.transport.update_document._session - assert session1 != session2 - session1 = client1.transport.delete_document._session - session2 = client2.transport.delete_document._session - assert session1 != session2 - session1 = client1.transport.batch_get_documents._session - session2 = client2.transport.batch_get_documents._session - assert session1 != session2 - session1 = client1.transport.begin_transaction._session - session2 = client2.transport.begin_transaction._session - assert session1 != session2 - session1 = client1.transport.commit._session - session2 = client2.transport.commit._session - assert session1 != session2 - session1 = client1.transport.rollback._session - session2 = client2.transport.rollback._session - assert session1 != session2 - session1 = client1.transport.run_query._session - session2 = client2.transport.run_query._session - assert session1 != session2 - session1 = client1.transport.run_aggregation_query._session - session2 = client2.transport.run_aggregation_query._session - assert session1 != session2 - session1 = client1.transport.partition_query._session - session2 = client2.transport.partition_query._session - assert session1 != session2 - session1 = client1.transport.write._session - session2 = client2.transport.write._session - assert session1 != session2 - 
session1 = client1.transport.listen._session - session2 = client2.transport.listen._session - assert session1 != session2 - session1 = client1.transport.list_collection_ids._session - session2 = client2.transport.list_collection_ids._session - assert session1 != session2 - session1 = client1.transport.batch_write._session - session2 = client2.transport.batch_write._session - assert session1 != session2 - session1 = client1.transport.create_document._session - session2 = client2.transport.create_document._session - assert session1 != session2 - - -def test_firestore_grpc_transport_channel(): - channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.FirestoreGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_firestore_grpc_asyncio_transport_channel(): - channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.FirestoreGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize( - "transport_class", - [transports.FirestoreGrpcTransport, transports.FirestoreGrpcAsyncIOTransport], -) -def test_firestore_transport_channel_mtls_with_client_cert_source(transport_class): - with mock.patch( - "grpc.ssl_channel_credentials", autospec=True - ) as grpc_ssl_channel_cred: - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize( - "transport_class", - [transports.FirestoreGrpcTransport, transports.FirestoreGrpcAsyncIOTransport], -) -def test_firestore_transport_channel_mtls_with_adc(transport_class): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = FirestoreClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = FirestoreClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. 
- actual = FirestoreClient.parse_common_billing_account_path(path) - assert expected == actual - - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = FirestoreClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = FirestoreClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = FirestoreClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = FirestoreClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = FirestoreClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = FirestoreClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format( - project=project, - ) - actual = FirestoreClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = FirestoreClient.common_project_path(**expected) - - # Check that the path construction is reversible. 
- actual = FirestoreClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = FirestoreClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = FirestoreClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = FirestoreClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.FirestoreTransport, "_prep_wrapped_messages" - ) as prep: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.FirestoreTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = FirestoreClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_delete_operation(transport: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -def test_delete_operation_from_dict(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_cancel_operation_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -def test_cancel_operation_from_dict(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_get_operation_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -def test_get_operation_from_dict(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.ListOperationsResponse) - - -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -def test_list_operations_field_headers(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -def test_list_operations_from_dict(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_transport_close_grpc(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc" - ) - with mock.patch.object( - type(getattr(client.transport, "_grpc_channel")), "close" - ) as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), transport="grpc_asyncio" - ) - with mock.patch.object( - type(getattr(client.transport, "_grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - with mock.patch.object( - type(getattr(client.transport, "_session")), "close" - ) as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - "rest", - "grpc", - ] - for transport in transports: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - - -@pytest.mark.parametrize( - "client_class,transport_class", - [ - (FirestoreClient, transports.FirestoreGrpcTransport), - (FirestoreAsyncClient, transports.FirestoreGrpcAsyncIOTransport), - ], -) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - )